[ 448.708304] env[61648]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61648) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 448.708740] env[61648]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61648) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 448.708740] env[61648]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61648) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 448.709080] env[61648]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 448.806885] env[61648]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61648) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 448.816454] env[61648]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61648) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 449.418773] env[61648]: INFO nova.virt.driver [None req-49146926-2f1a-42ad-9ae7-ca888071214f None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 449.489584] env[61648]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 449.489765] env[61648]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 449.489818] env[61648]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61648) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 452.637123] env[61648]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-548b1ab6-c64f-4d5e-a5ac-903398414684 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.653430] env[61648]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61648) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 452.653589] env[61648]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-7a411aab-c19f-4ed2-9b35-659dfed076c4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.680110] env[61648]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 0e37e.
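The os_vif entries above come from os-vif initializing its VIF plugin registry before the compute driver is loaded. A minimal standalone sketch of that call, assuming only that the os-vif package and its bundled plugins are installed:

import logging
import os_vif

# Show the DEBUG messages os-vif emits while loading its plugins,
# matching the "Loaded VIF plugin class ... with name ..." lines above.
logging.basicConfig(level=logging.DEBUG)
os_vif.initialize()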
[ 452.680260] env[61648]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.191s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 452.680792] env[61648]: INFO nova.virt.vmwareapi.driver [None req-49146926-2f1a-42ad-9ae7-ca888071214f None None] VMware vCenter version: 7.0.3
[ 452.684195] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118b6fbd-b7e5-4ba9-b6bb-52a5bcc73690 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.701825] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35475142-cb84-473d-9bcf-149c372b0a49 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.707739] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a0bbcb-5887-4072-a386-9aec6c522072 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.714245] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a53c1db-9e0e-44f3-8189-c5ac18d450f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.726982] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31637c4c-3ed8-4404-9bc1-17d0b9b10f48 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.732985] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ee5bf3-923f-4320-8737-26cd5e05241f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.762447] env[61648]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-866a256d-715e-47b8-a6c3-a16021270ba3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 452.767526] env[61648]: DEBUG nova.virt.vmwareapi.driver [None req-49146926-2f1a-42ad-9ae7-ca888071214f None None] Extension org.openstack.compute already exists. {{(pid=61648) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 452.770143] env[61648]: INFO nova.compute.provider_config [None req-49146926-2f1a-42ad-9ae7-ca888071214f None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
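The Acquiring / acquired / "released" lock entries here (and throughout the rest of this log) are emitted by oslo.concurrency's named-lock helpers, which record how long a caller waited for and held a lock. A small self-contained illustration of the same pattern; the lock name "example_lock" is made up for this sketch and is not one Nova uses:

from oslo_concurrency import lockutils

@lockutils.synchronized('example_lock')
def create_session():
    # Runs with the named in-process lock held; lockutils logs the
    # "Acquiring lock ... / waited ... / held ..." DEBUG messages around it.
    return 'session'

if __name__ == '__main__':
    create_session()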
[ 453.273823] env[61648]: DEBUG nova.context [None req-49146926-2f1a-42ad-9ae7-ca888071214f None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),ab807b3f-dc86-497f-9347-43e0418a09b0(cell1) {{(pid=61648) load_cells /opt/stack/nova/nova/context.py:464}}
[ 453.276409] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 453.276651] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 453.277388] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 453.277824] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Acquiring lock "ab807b3f-dc86-497f-9347-43e0418a09b0" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 453.278017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Lock "ab807b3f-dc86-497f-9347-43e0418a09b0" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 453.279100] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Lock "ab807b3f-dc86-497f-9347-43e0418a09b0" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 453.299435] env[61648]: INFO dbcounter [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Registered counter for database nova_cell0
[ 453.307576] env[61648]: INFO dbcounter [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Registered counter for database nova_cell1
[ 453.311735] env[61648]: DEBUG oslo_db.sqlalchemy.engines [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61648) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 453.312096] env[61648]: DEBUG oslo_db.sqlalchemy.engines [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61648) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 453.316803] env[61648]: ERROR nova.db.main.api [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 453.316803] env[61648]: result = function(*args, **kwargs)
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 453.316803] env[61648]: return func(*args, **kwargs)
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 453.316803] env[61648]: result = fn(*args, **kwargs)
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 453.316803] env[61648]: return f(*args, **kwargs)
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 453.316803] env[61648]: return db.service_get_minimum_version(context, binaries)
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 453.316803] env[61648]: _check_db_access()
[ 453.316803] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 453.316803] env[61648]: stacktrace = ''.join(traceback.format_stack())
[ 453.316803] env[61648]:
[ 453.317915] env[61648]: ERROR nova.db.main.api [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 453.317915] env[61648]: result = function(*args, **kwargs)
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 453.317915] env[61648]: return func(*args, **kwargs)
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 453.317915] env[61648]: result = fn(*args, **kwargs)
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 453.317915] env[61648]: return f(*args, **kwargs)
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 453.317915] env[61648]: return db.service_get_minimum_version(context, binaries)
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 453.317915] env[61648]: _check_db_access()
[ 453.317915] env[61648]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 453.317915] env[61648]: stacktrace = ''.join(traceback.format_stack())
[ 453.317915] env[61648]:
[ 453.318490] env[61648]: WARNING nova.objects.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Failed to get minimum service version for cell ab807b3f-dc86-497f-9347-43e0418a09b0
[ 453.318490] env[61648]: WARNING nova.objects.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 453.318849] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Acquiring lock "singleton_lock" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 453.319013] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Acquired lock "singleton_lock" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
453.319266] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Releasing lock "singleton_lock" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 453.319576] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Full set of CONF: {{(pid=61648) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 453.319719] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ******************************************************************************** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 453.319845] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Configuration options gathered from: {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 453.319978] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 453.320187] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 453.320315] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ================================================================================ {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 453.320562] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] allow_resize_to_same_host = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.320734] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] arq_binding_timeout = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.320866] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] backdoor_port = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.320993] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] backdoor_socket = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.321170] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] block_device_allocate_retries = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.321338] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] block_device_allocate_retries_interval = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.321502] env[61648]: DEBUG 
oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cert = self.pem {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.321666] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.321834] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute_monitors = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322007] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] config_dir = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322183] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] config_drive_format = iso9660 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322317] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322482] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] config_source = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322647] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] console_host = devstack {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322810] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] control_exchange = nova {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.322965] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cpu_allocation_ratio = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.323135] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] daemon = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.323335] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] debug = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.323518] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_access_ip_network_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.323697] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_availability_zone = nova {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.323857] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_ephemeral_format = 
None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324029] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_green_pool_size = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324270] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324434] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] default_schedule_zone = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324592] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] disk_allocation_ratio = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324751] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] enable_new_services = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.324926] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] enabled_apis = ['osapi_compute'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325103] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] enabled_ssl_apis = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325264] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] flat_injected = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325422] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] force_config_drive = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325579] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] force_raw_images = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325744] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] graceful_shutdown_timeout = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.325902] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] heal_instance_info_cache_interval = 60 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.326126] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] host = cpu-1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.326303] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.326487] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] initial_disk_allocation_ratio = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.326781] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] initial_ram_allocation_ratio = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.326862] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327041] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_build_timeout = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327186] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_delete_interval = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327346] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_format = [instance: %(uuid)s] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327512] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_name_template = instance-%08x {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327672] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_usage_audit = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.327837] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_usage_audit_period = month {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328416] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328416] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] instances_path = /opt/stack/data/nova/instances {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328416] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] internal_service_availability_zone = internal {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328497] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] key = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328627] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] live_migration_retry_count = 30 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328773] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_color = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.328935] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_config_append = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329107] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329267] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_dir = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329435] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329579] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_options = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329744] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_rotate_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.329910] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_rotate_interval_type = days {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330085] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] log_rotation_type = none {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330224] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330351] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330521] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330683] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330809] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.330971] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] long_rpc_timeout = 1800 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331140] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_concurrent_builds = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331314] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_concurrent_live_migrations = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331496] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_concurrent_snapshots = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331657] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_local_block_devices = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331813] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_logfile_count = 30 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.331970] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] max_logfile_size_mb = 200 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332142] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] maximum_instance_delete_attempts = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332685] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metadata_listen = 0.0.0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332685] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metadata_listen_port = 8775 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332685] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metadata_workers = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332844] env[61648]: DEBUG oslo_service.service 
[None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] migrate_max_retries = -1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.332983] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] mkisofs_cmd = genisoimage {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.333207] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] my_block_storage_ip = 10.180.1.21 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.333362] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] my_ip = 10.180.1.21 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.333535] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] network_allocate_retries = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.333712] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.333878] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] osapi_compute_listen = 0.0.0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334047] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] osapi_compute_listen_port = 8774 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334217] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] osapi_compute_unique_server_name_scope = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334381] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] osapi_compute_workers = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334543] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] password_length = 12 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334699] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] periodic_enable = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.334857] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] periodic_fuzzy_delay = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335033] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] pointer_model = usbtablet {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335203] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] preallocate_images = none {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335364] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] publish_errors = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335520] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] pybasedir = /opt/stack/nova {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335688] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ram_allocation_ratio = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.335847] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rate_limit_burst = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336021] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rate_limit_except_level = CRITICAL {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336184] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rate_limit_interval = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336342] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reboot_timeout = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336528] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reclaim_instance_interval = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336733] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] record = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.336909] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reimage_timeout_per_gb = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337086] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] report_interval = 120 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337250] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rescue_timeout = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337443] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reserved_host_cpus = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337560] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reserved_host_disk_mb = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337715] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec 
None None] reserved_host_memory_mb = 512 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.337873] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] reserved_huge_pages = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338039] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] resize_confirm_window = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338204] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] resize_fs_using_block_device = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338362] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] resume_guests_state_on_host_boot = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338555] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338729] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] rpc_response_timeout = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.338888] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] run_external_periodic_tasks = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339069] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] running_deleted_instance_action = reap {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339232] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] running_deleted_instance_poll_interval = 1800 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339391] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] running_deleted_instance_timeout = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339551] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler_instance_sync_interval = 120 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339717] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_down_time = 720 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.339883] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] servicegroup_driver = db {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340049] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] shell_completion = None {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340213] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] shelved_offload_time = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340369] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] shelved_poll_interval = 3600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340537] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] shutdown_timeout = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340697] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] source_is_ipv6 = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.340856] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ssl_only = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.341119] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.341302] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] sync_power_state_interval = 600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.341492] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] sync_power_state_pool_size = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.341683] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] syslog_log_facility = LOG_USER {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.341843] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] tempdir = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342009] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] timeout_nbd = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342184] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] transport_url = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342344] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] update_resources_interval = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342503] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_cow_images = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342661] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_eventlog = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342818] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_journal = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.342974] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_json = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343143] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_rootwrap_daemon = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343318] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_stderr = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343491] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] use_syslog = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343651] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vcpu_pin_set = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343817] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plugging_is_fatal = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.343985] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plugging_timeout = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.344165] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] virt_mkfs = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.344360] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] volume_usage_poll_interval = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.344542] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] watch_log_file = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.344713] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] web = /usr/share/spice-html5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 453.344900] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.345079] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.345248] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.345421] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_concurrency.disable_process_locking = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346027] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346227] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346402] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346580] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346754] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.346924] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.347119] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.auth_strategy = keystone {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.347298] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.compute_link_prefix = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.347541] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.347686] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.dhcp_domain = novalocal {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.347858] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.enable_instance_password = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348037] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.glance_link_prefix = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348214] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348393] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348560] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.instance_list_per_project_cells = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348725] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.list_records_by_skipping_down_cells = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.348891] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.local_metadata_per_cell = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.349072] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.max_limit = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.349245] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.metadata_cache_expiration = 15 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.349422] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.neutron_default_tenant_id = default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.349660] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.response_validation = warn {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.349851] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.use_neutron_default_nets = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350047] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350221] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350394] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350572] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350744] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_dynamic_targets = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.350910] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_jsonfile_path = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.351103] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.351319] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.backend = dogpile.cache.memcached {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.351515] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.backend_argument = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.351692] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.config_prefix = cache.oslo {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.351861] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.dead_timeout = 60.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352035] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.debug_cache_backend = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352205] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.enable_retry_client = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352366] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.enable_socket_keepalive = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352563] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.enabled = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352747] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.enforce_fips_mode = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.352916] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.expiration_time = 600 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353095] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.hashclient_retry_attempts = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353268] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.hashclient_retry_delay = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353462] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_dead_retry = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353630] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_password = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353796] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.353960] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354139] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_pool_maxsize = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354307] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354469] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_sasl_enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354649] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354818] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_socket_timeout = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.354980] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.memcache_username = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.355160] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.proxies = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.355328] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_db = 0 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.355488] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_password = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.355702] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_sentinel_service_name = mymaster {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.355888] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356071] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_server = localhost:6379 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356241] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_socket_timeout = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356400] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.redis_username = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356561] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.retry_attempts = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356724] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.retry_delay = 0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.356885] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.socket_keepalive_count = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.357055] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.socket_keepalive_idle = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.357221] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.socket_keepalive_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.357380] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.tls_allowed_ciphers = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.357537] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.tls_cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.357693] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.tls_certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
453.357853] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.tls_enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358015] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cache.tls_keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358193] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358368] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.auth_type = password {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358529] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358733] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.catalog_info = volumev3::publicURL {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.358899] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359074] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359239] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.cross_az_attach = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359403] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.debug = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359563] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.endpoint_template = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359724] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.http_retries = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.359885] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360054] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360230] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.os_region_name = RegionOne 
{{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360397] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360557] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cinder.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360729] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.360888] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.cpu_dedicated_set = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361058] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.cpu_shared_set = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361227] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.image_type_exclude_list = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361413] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361599] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.max_concurrent_disk_ops = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361779] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.max_disk_devices_to_attach = -1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.361943] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362126] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362293] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.resource_provider_association_refresh = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362455] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362614] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.shutdown_retry_interval = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362791] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.362965] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] conductor.workers = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.363156] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] console.allowed_origins = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.363350] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] console.ssl_ciphers = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.363531] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] console.ssl_minimum_version = default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.363709] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] consoleauth.enforce_session_timeout = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.363873] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] consoleauth.token_ttl = 600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364055] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364219] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364411] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364583] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364743] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.364902] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365074] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] 
cyborg.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365236] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365398] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365556] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365713] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.region_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.365868] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366032] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366206] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.service_type = accelerator {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366369] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366528] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366684] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.366841] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367028] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367193] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] cyborg.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367404] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.backend = sqlalchemy {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367597] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.connection = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367769] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.connection_debug = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.367940] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.connection_parameters = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368119] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.connection_recycle_time = 3600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.connection_trace = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368450] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.db_inc_retry_interval = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368613] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.db_max_retries = 20 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368775] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.db_max_retry_interval = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.368939] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.db_retry_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.369115] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.max_overflow = 50 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.369282] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.max_pool_size = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.369446] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.max_retries = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.369617] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.369776] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.mysql_wsrep_sync_wait = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
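[editorial note] The DEBUG records above and below are oslo.config's startup dump of every registered option, produced by ConfigOpts.log_opt_values() (the cfg.py:2826 frame cited in each record); options registered as secret, such as database.connection or cache.memcache_password, are masked as '****'. A minimal sketch of that mechanism follows, under the assumption that it is illustrative only and not Nova code: only the names api.auth_strategy and api.metadata_cache_expiration are taken from the dump above, while the 'api.admin_password' option and the 'sketch' project name are hypothetical.

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF

    # Register a few typed options under an 'api' group, mirroring the
    # "api.*" entries seen in the dump above.
    CONF.register_opts(
        [
            cfg.StrOpt('auth_strategy', default='keystone'),
            cfg.IntOpt('metadata_cache_expiration', default=15),
            # Hypothetical option for illustration: secret=True values are
            # logged masked as '****', like database.connection above.
            cfg.StrOpt('admin_password', secret=True, default='s3cret'),
        ],
        group='api',
    )

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF(args=[], project='sketch')          # parse (empty) CLI/config sources
    CONF.log_opt_values(LOG, logging.DEBUG)  # emits "api.auth_strategy = keystone", etc.

Run as a standalone script, this prints one DEBUG line per registered option in the same "group.option = value" form as the service log, which is why the dump continues uninterrupted below.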
[ 453.369933] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.pool_timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370107] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.retry_interval = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370270] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.slave_connection = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370459] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.sqlite_synchronous = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370635] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] database.use_db_reconnect = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370816] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.backend = sqlalchemy {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.370985] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.connection = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.371167] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.connection_debug = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.371362] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.connection_parameters = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.371546] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.connection_recycle_time = 3600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.371713] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.connection_trace = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.371877] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.db_inc_retry_interval = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372053] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.db_max_retries = 20 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372218] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.db_max_retry_interval = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372383] env[61648]: DEBUG 
oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.db_retry_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372547] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.max_overflow = 50 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372709] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.max_pool_size = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.372869] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.max_retries = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373055] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373222] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373423] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.pool_timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373611] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.retry_interval = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373776] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.slave_connection = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.373941] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] api_database.sqlite_synchronous = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374132] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] devices.enabled_mdev_types = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374316] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374490] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ephemeral_storage_encryption.default_format = luks {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374655] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ephemeral_storage_encryption.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374820] env[61648]: 
DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.374994] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.api_servers = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375172] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375338] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375502] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375663] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375819] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.375979] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.debug = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.376164] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.default_trusted_certificate_ids = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.376330] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.enable_certificate_validation = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.376521] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.enable_rbd_download = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.376690] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.376856] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377040] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377207] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] 
glance.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377367] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377531] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.num_retries = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377702] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.rbd_ceph_conf = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.377864] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.rbd_connect_timeout = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378054] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.rbd_pool = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378234] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.rbd_user = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378401] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.region_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378563] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378722] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.378892] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.service_type = image {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.379066] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.379239] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.379436] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.379700] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.380011] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.380313] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.verify_glance_signatures = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.380596] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] glance.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.380871] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] guestfs.debug = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.381087] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] mks.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.381498] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.381701] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.manager_interval = 2400 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.381878] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.precache_concurrency = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382063] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.remove_unused_base_images = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382239] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382413] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382597] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] image_cache.subdirectory_name = _base {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382776] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.api_max_retries = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.382943] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.api_retry_interval = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
453.383119] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.383285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.auth_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.383472] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.383638] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.383801] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.383964] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.conductor_group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384138] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384303] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384461] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384624] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384779] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.384937] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385106] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385276] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.peer_list = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385468] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.region_name = None {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385632] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385802] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.serial_console_state_timeout = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.385960] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386143] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.service_type = baremetal {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386308] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.shard = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386470] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386628] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386785] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.386943] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.387138] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.387304] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ironic.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.387492] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.387666] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] key_manager.fixed_key = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.387846] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388016] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.barbican_api_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388184] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.barbican_endpoint = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388354] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.barbican_endpoint_type = public {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388515] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.barbican_region_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388675] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388834] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.388996] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389172] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389332] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389497] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.number_of_retries = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389657] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.retry_delay = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389819] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.send_service_user_token = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.389979] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390151] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390312] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.verify_ssl = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390470] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican.verify_ssl_path = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390637] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390801] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.auth_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.390960] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391131] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391310] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391494] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391661] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391823] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.391982] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] barbican_service_user.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392163] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.approle_role_id = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392325] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.approle_secret_id = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392497] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.kv_mountpoint = secret {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392660] env[61648]: DEBUG 
oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.kv_path = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392828] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.kv_version = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.392989] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.namespace = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.393164] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.root_token_id = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.393349] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.ssl_ca_crt_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.393526] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.timeout = 60.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.393693] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.use_ssl = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.393864] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394046] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394217] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.auth_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394382] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394544] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394706] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.394864] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395031] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.connect_retry_delay = None {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395194] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395355] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395512] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395671] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395829] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.395988] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.region_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396160] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396321] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396493] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.service_type = identity {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396661] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396816] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.396974] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.397144] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.397326] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
453.397490] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] keystone.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.397691] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.connection_uri = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.397856] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_mode = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398086] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_model_extra_flags = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398209] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_models = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398383] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_power_governor_high = performance {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398556] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_power_governor_low = powersave {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398722] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_power_management = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.398896] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399074] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.device_detach_attempts = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399245] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.device_detach_timeout = 20 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399414] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.disk_cachemodes = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399576] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.disk_prefix = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399743] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.enabled_perf_events = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.399907] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.file_backed_memory = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400082] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.gid_maps = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400251] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.hw_disk_discard = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400411] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.hw_machine_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400588] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_rbd_ceph_conf = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400753] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.400918] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401097] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_rbd_glance_store_name = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401273] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_rbd_pool = rbd {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401468] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_type = default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401636] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.images_volume_group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401803] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.inject_key = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.401968] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.inject_partition = -2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402144] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.inject_password = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402311] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] 
libvirt.iscsi_iface = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402475] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.iser_use_multipath = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402638] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_bandwidth = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402801] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.402964] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_downtime = 500 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.403141] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.403339] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.403509] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_inbound_addr = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.403676] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.403848] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_permit_post_copy = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404022] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_scheme = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404204] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_timeout_action = abort {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404373] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_tunnelled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404533] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_uri = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404695] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.live_migration_with_native_tls = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.404857] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.max_queues = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.405029] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.405285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.405455] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.nfs_mount_options = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.405770] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.405947] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406130] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_iser_scan_tries = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406297] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_memory_encrypted_guests = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406466] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406629] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_pcie_ports = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406798] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.num_volume_scan_tries = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.406966] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.pmem_namespaces = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.407138] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.quobyte_client_cfg = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.407439] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.407615] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rbd_connect_timeout = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.407781] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.407946] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408126] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rbd_secret_uuid = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408288] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rbd_user = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408457] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408632] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.remote_filesystem_transport = ssh {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408794] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rescue_image_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.408955] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rescue_kernel_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.409128] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rescue_ramdisk_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.409300] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.409463] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.rx_queue_size = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.409632] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.smbfs_mount_options = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.409909] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410094] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.snapshot_compression = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410263] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.snapshot_image_format = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410484] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410654] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.sparse_logical_volumes = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410819] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.swtpm_enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.410990] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.swtpm_group = tss {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.411199] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.swtpm_user = tss {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.411387] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.sysinfo_serial = unique {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.411557] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.tb_cache_size = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.411718] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.tx_queue_size = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.411886] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.uid_maps = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412062] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.use_virtio_for_bridges = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412241] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.virt_type = kvm {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412415] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.volume_clear = zero {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412583] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.volume_clear_size = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412811] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.volume_use_multipath = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.412911] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_cache_path = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.413092] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.413265] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_mount_group = qemu {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.413463] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_mount_opts = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.413643] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.413927] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414115] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.vzstorage_mount_user = stack {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414465] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414639] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.auth_type = password {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414801] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.414962] env[61648]: DEBUG oslo_service.service 
[None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415141] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415305] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415470] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415640] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.default_floating_pool = public {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415803] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.415967] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.extension_sync_interval = 600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416144] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.http_retries = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416311] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416472] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416631] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416804] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.416962] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417145] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.ovs_bridge = br-int {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417312] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.physnets = [] {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417485] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.region_name = RegionOne {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417646] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417816] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.service_metadata_proxy = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.417978] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418165] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.service_type = network {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418342] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418487] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418648] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418808] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.418987] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.419161] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] neutron.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.419333] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] notifications.bdms_in_notifications = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.419512] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] notifications.default_level = INFO {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.419697] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] notifications.notification_format = unversioned {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.419865] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] notifications.notify_on_state_change = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420050] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420232] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] pci.alias = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420405] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] pci.device_spec = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420573] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] pci.report_in_placement = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420745] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.420918] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.auth_type = password {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421099] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421265] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421425] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421591] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421751] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.421911] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422082] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.default_domain_id = None {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422242] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.default_domain_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422401] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.domain_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422561] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.domain_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422721] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.422883] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423053] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423218] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423387] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423552] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.password = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423712] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.project_domain_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.423879] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.project_domain_name = Default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424058] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.project_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424234] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.project_name = service {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424408] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.region_name = RegionOne {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424572] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424731] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.424901] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.service_type = placement {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425077] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425241] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425402] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425563] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.system_scope = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425722] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.425881] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.trust_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426049] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.user_domain_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426221] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.user_domain_name = Default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426379] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.user_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426554] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.username = nova {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426735] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.426899] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] placement.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427087] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.cores = 20 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427257] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.count_usage_from_placement = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427428] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427607] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.injected_file_content_bytes = 10240 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427776] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.injected_file_path_length = 255 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.427941] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.injected_files = 5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428119] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.instances = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428286] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.key_pairs = 100 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428454] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.metadata_items = 128 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428620] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.ram = 51200 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428784] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.recheck_quota = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.428950] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.server_group_members = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429127] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] quota.server_groups = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429301] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429465] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429627] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.image_metadata_prefilter = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429786] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.429948] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.max_attempts = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430123] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.max_placement_results = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430287] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430453] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.query_placement_for_image_type_support = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430617] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430791] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] scheduler.workers = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.430965] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.431150] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.431332] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.431505] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.431671] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.431837] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432009] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432211] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432383] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.host_subset_size = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432553] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432717] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.432881] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433056] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.isolated_hosts = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433225] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.isolated_images = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433394] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433557] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433721] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.433884] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.pci_in_placement = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434055] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434220] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434384] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434545] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434706] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.434869] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435038] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.track_instance_changes = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435221] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435392] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metrics.required = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435558] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metrics.weight_multiplier = 1.0 
{{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435721] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.435886] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] metrics.weight_setting = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.436228] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.436411] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.436593] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.port_range = 10000:20000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.436766] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.436938] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437120] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] serial_console.serialproxy_port = 6083 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437293] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437474] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.auth_type = password {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437629] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437787] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.437952] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438131] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.insecure = False {{(pid=61648) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438293] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438466] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.send_service_user_token = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438631] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438804] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] service_user.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.438979] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.agent_enabled = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.439159] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.439475] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.439670] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.439840] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.html5proxy_port = 6082 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440011] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.image_compression = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440209] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.jpeg_compression = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440373] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.playback_compression = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440542] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.require_secure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440714] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.server_listen = 127.0.0.1 {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.440883] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441051] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.streaming_mode = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441216] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] spice.zlib_compression = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441386] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] upgrade_levels.baseapi = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441557] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] upgrade_levels.compute = auto {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441718] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] upgrade_levels.conductor = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.441878] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] upgrade_levels.scheduler = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442053] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.auth_section = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442219] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.auth_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442378] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442537] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442700] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.442860] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443026] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.keyfile = None {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443192] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443354] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vendordata_dynamic_auth.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443528] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.api_retry_count = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443686] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.ca_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.443855] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.cache_prefix = devstack-image-cache {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444031] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.cluster_name = testcl1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444201] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.connection_pool_size = 10 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444360] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.console_delay_seconds = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444528] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.datastore_regex = ^datastore.* {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444743] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.444914] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.host_password = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445092] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.host_port = 443 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445268] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.host_username = administrator@vsphere.local {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445435] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.insecure = True {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445600] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.integration_bridge = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445764] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.maximum_objects = 100 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.445923] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.pbm_default_policy = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446095] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.pbm_enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446258] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.pbm_wsdl_location = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446429] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446590] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.serial_port_proxy_uri = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446750] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.serial_port_service_uri = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.446917] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.task_poll_interval = 0.5 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447099] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.use_linked_clone = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447274] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.vnc_keymap = en-us {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447441] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.vnc_port = 5900 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447607] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vmware.vnc_port_total = 10000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447792] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.auth_schemes = ['none'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.447966] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.448285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.448473] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.448688] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.novncproxy_port = 6080 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.448821] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.server_listen = 127.0.0.1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.448993] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.449169] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.vencrypt_ca_certs = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.449333] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.vencrypt_client_cert = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.449494] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vnc.vencrypt_client_key = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.449675] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.449838] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_deep_image_inspection = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450006] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450173] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450334] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450496] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.disable_rootwrap = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450658] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.enable_numa_live_migration = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450818] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.450978] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451151] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451314] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.libvirt_disable_apic = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451473] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451634] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451793] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.451952] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452124] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452285] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452446] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452605] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452764] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.452929] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453124] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453313] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.client_socket_timeout = 900 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453472] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.default_pool_size = 1000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453637] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.keep_alive = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453803] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.max_header_line = 16384 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.453967] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.secure_proxy_ssl_header = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454142] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.ssl_ca_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454308] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.ssl_cert_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454472] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.ssl_key_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454638] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.tcp_keepidle = 600 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454817] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.454982] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] zvm.ca_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.455155] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] zvm.cloud_connector_url = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.455471] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.455647] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] zvm.reachable_timeout = 300 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.455828] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.enforce_new_defaults = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.456241] env[61648]: WARNING oslo_config.cfg [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 453.456429] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.enforce_scope = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.456612] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.policy_default_rule = default {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.456797] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.456975] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.policy_file = policy.yaml {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457162] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457326] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457491] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457652] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457815] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.457985] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.458174] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.458357] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.connection_string = messaging:// {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.458530] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.enabled = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.458706] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.es_doc_type = notification 
{{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.458875] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.es_scroll_size = 10000 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459053] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.es_scroll_time = 2m {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459223] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.filter_error_trace = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459394] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.hmac_keys = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459565] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.sentinel_service_name = mymaster {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459734] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.socket_timeout = 0.1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.459896] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.trace_requests = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460068] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler.trace_sqlalchemy = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460258] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler_jaeger.process_tags = {} {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460421] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler_jaeger.service_name_prefix = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460589] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] profiler_otlp.service_name_prefix = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460756] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] remote_debug.host = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.460915] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] remote_debug.port = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461106] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461274] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461437] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461601] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461763] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.461923] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462094] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462260] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462422] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462596] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462753] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.462924] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463102] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463282] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463453] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463622] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463787] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.463963] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464139] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464304] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464476] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464639] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464802] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.464969] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465143] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465309] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465473] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465635] env[61648]: DEBUG oslo_service.service [None 
req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465804] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.465970] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.466159] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.466332] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.466499] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.466669] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.466839] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.ssl_version = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467016] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467208] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467377] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_notifications.retry = -1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467564] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467741] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_messaging_notifications.transport_url = **** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.467914] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.auth_section = None {{(pid=61648) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468091] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.auth_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468255] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.cafile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468413] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.certfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468576] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.collect_timing = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468737] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.connect_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.468920] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.connect_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469062] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.endpoint_id = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469223] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.endpoint_override = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469383] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.insecure = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469542] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.keyfile = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469699] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.max_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.469856] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.min_version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470022] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.region_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470187] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.retriable_status_codes = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470346] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.service_name = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470510] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.service_type = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470674] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.split_loggers = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470832] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.status_code_retries = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.470991] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.status_code_retry_delay = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471164] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.timeout = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471324] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.valid_interfaces = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471483] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_limit.version = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471648] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_reports.file_event_handler = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471812] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.471972] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] oslo_reports.log_dir = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472157] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472321] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472483] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472650] 
env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472818] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.472977] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473162] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473341] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473488] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473652] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473813] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.473971] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] vif_plug_ovs_privileged.user = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.474155] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.flat_interface = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.474337] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.474512] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.474682] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.474852] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475028] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475198] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475362] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475544] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475716] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.isolate_vif = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.475883] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476057] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476231] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476401] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.ovsdb_interface = native {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476564] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] os_vif_ovs.per_port_bridge = False {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476731] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] privsep_osbrick.capabilities = [21] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.476892] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] privsep_osbrick.group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477060] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] privsep_osbrick.helper_command = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477230] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477394] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477554] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] privsep_osbrick.user = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477725] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.477886] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.group = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.478053] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.helper_command = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.478223] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.478387] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.478547] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] nova_sys_admin.user = None {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 453.478678] env[61648]: DEBUG oslo_service.service [None req-f43049bb-41ea-4522-8979-3c4594afa4ec None None] ******************************************************************************** {{(pid=61648) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 453.479202] env[61648]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 453.983069] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Getting list of instances from cluster (obj){ [ 453.983069] env[61648]: value = "domain-c8" [ 453.983069] env[61648]: _type = "ClusterComputeResource" [ 453.983069] env[61648]: } {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 453.984252] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b65695f-6513-4bba-aaf0-784599a4a705 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 453.993012] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Got total of 0 instances {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 453.993579] env[61648]: WARNING nova.virt.vmwareapi.driver [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 453.994056] env[61648]: INFO nova.virt.node [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Generated node identity 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 [ 453.994315] env[61648]: INFO nova.virt.node [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Wrote node identity 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 to /opt/stack/data/n-cpu-1/compute_id [ 454.498226] env[61648]: WARNING nova.compute.manager [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Compute nodes ['1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 455.504943] env[61648]: INFO nova.compute.manager [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 456.512021] env[61648]: WARNING nova.compute.manager [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 456.512021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 456.512021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 456.512021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 456.512021] env[61648]: DEBUG nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 456.512021] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65b4302-6c57-4e98-b3d5-cdaf540a3929 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.520580] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa02c28-5ec1-445e-a9c6-43d70ce768d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.534276] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-842feb4a-1ff6-4f49-9e90-f4966d4db29d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.540621] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fc82ee-d061-43e8-8740-590dff290730 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.569763] env[61648]: DEBUG nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181454MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 456.570139] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 456.570480] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 457.074705] env[61648]: WARNING nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] No compute node record for cpu-1:1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 could not be found. [ 457.577475] env[61648]: INFO nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 [ 459.088021] env[61648]: DEBUG nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 459.088021] env[61648]: DEBUG nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 459.234336] env[61648]: INFO nova.scheduler.client.report [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] [req-9e598e82-64c6-4c93-bc05-af2bbb07ed14] Created resource provider record via placement API for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 459.250685] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68b19f2-ee23-4e40-8e98-711cbed37b92 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.259069] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c5db3c-9045-4a7c-971f-f44c8016a1db {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.287645] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ecbeb9-c5f2-4fef-93be-3359c1da8759 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.295027] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d5216b-8b9e-42f1-9683-b841774e491a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.307212] env[61648]: DEBUG nova.compute.provider_tree [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 459.857868] env[61648]: DEBUG nova.scheduler.client.report [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 459.857868] env[61648]: DEBUG nova.compute.provider_tree [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 0 to 1 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 459.857868] env[61648]: DEBUG nova.compute.provider_tree [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 459.945666] env[61648]: DEBUG nova.compute.provider_tree [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Updating 
resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 1 to 2 during operation: update_traits {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 460.453529] env[61648]: DEBUG nova.compute.resource_tracker [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 460.453529] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.881s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 460.453529] env[61648]: DEBUG nova.service [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Creating RPC server for service compute {{(pid=61648) start /opt/stack/nova/nova/service.py:186}} [ 460.466695] env[61648]: DEBUG nova.service [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] Join ServiceGroup membership for this service compute {{(pid=61648) start /opt/stack/nova/nova/service.py:203}} [ 460.466695] env[61648]: DEBUG nova.servicegroup.drivers.db [None req-92a33618-396a-4851-80a7-9fd4c2e51afb None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61648) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 492.606033] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquiring lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 492.606033] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 493.113601] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 493.661823] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 493.662444] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 493.665008] env[61648]: INFO nova.compute.claims [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 494.720045] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15ac93e-79d0-4840-86e9-9736b948825c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.729822] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4d35cd-52f3-4262-9961-e1d43689629b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.766704] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ffc03c-531f-46b3-8638-ff1f329d757d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.775093] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd6ec70-06b9-4762-87e6-a615e686e19c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.794486] env[61648]: DEBUG nova.compute.provider_tree [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 495.299136] env[61648]: DEBUG nova.scheduler.client.report [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 495.806563] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 495.808132] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 496.314672] env[61648]: DEBUG nova.compute.utils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 496.316534] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 496.320104] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 496.542895] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquiring lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 496.543166] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 496.823391] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 497.050584] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 497.581367] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 497.581596] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 497.583213] env[61648]: INFO nova.compute.claims [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 497.846075] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 498.163827] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.164074] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.328113] env[61648]: DEBUG nova.policy [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb168bca8ce747d8bb5dc69596398307', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8f419a14cd24179b8ebbc5620c2e4bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 498.458657] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 498.458900] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 498.459548] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 498.459839] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 498.459956] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 498.460136] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 498.460355] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 498.460672] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 498.461200] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 498.461998] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c 
tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 498.462078] env[61648]: DEBUG nova.virt.hardware [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 498.464229] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91597062-d4cb-4004-acc4-d5bb82b49a97 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.476335] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f94107-abfb-4f5b-b979-79e4be09b9c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.504391] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30bfcbc-6885-445f-952a-fa4dc03d4640 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.669216] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 498.689855] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee47f17b-d283-4f80-a9b6-f28b75768b07 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.699941] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da35984b-cf85-4d3e-8895-14ad980d8897 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.732822] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b6eef3-1f31-4358-a475-c7a9db47028e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.745028] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abda0ba0-acdf-4a5c-8e26-d61433d93564 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.757635] env[61648]: DEBUG nova.compute.provider_tree [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 499.148823] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Successfully created port: 
39a70d53-105a-4064-b125-3f7c619ed1ca {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 499.205520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 499.260873] env[61648]: DEBUG nova.scheduler.client.report [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 499.461187] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquiring lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 499.461577] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 499.767771] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.770065] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 499.775763] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.571s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 499.777194] env[61648]: INFO nova.compute.claims [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 499.964465] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 500.285704] env[61648]: DEBUG nova.compute.utils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 500.287704] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 500.288070] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 500.492243] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 500.508571] env[61648]: DEBUG nova.policy [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fad1bbdae67a41478d64f1ded6159e04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50f59e23969141cbb9d1f106a757a923', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 500.794102] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 500.896793] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a20bb2-f5cb-422f-8ee3-8aa3b0223601 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.904631] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b890c5f-ef06-4d16-9346-897cf8c7f46c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.938356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1f8607-6bc4-4958-a0d6-db25108f732c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.948841] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be494a11-e945-4262-ae9e-4b39939bf78b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.961761] env[61648]: DEBUG nova.compute.provider_tree [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 501.465057] env[61648]: DEBUG nova.scheduler.client.report [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 501.705305] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "92d3ea1d-8a72-4f4b-87be-70367170d933" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.705305] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "92d3ea1d-8a72-4f4b-87be-70367170d933" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.810729] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Start spawning the instance on the 
hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 501.856462] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 501.856846] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 501.856935] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 501.858739] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 501.858739] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 501.858739] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 501.858739] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 501.858739] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 501.858875] env[61648]: DEBUG nova.virt.hardware [None 
req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 501.858875] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 501.858875] env[61648]: DEBUG nova.virt.hardware [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 501.860550] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98af92cb-af3f-4d68-9175-b3f4127c6da4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.871410] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56085cc-52db-49c6-af8b-abb8326d0621 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.970421] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 501.970975] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 501.975812] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.484s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.977642] env[61648]: INFO nova.compute.claims [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 502.146244] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Successfully created port: f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 502.208015] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 502.424487] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "4deb5eaa-e8e2-41cc-aef9-722235e69b95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.425090] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "4deb5eaa-e8e2-41cc-aef9-722235e69b95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.488608] env[61648]: DEBUG nova.compute.utils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 502.490255] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "77ce1c04-88c1-4df5-9436-4f1878217c4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.490255] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "77ce1c04-88c1-4df5-9436-4f1878217c4a" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.493564] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Not allocating networking since 'none' was specified. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 502.623143] env[61648]: ERROR nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. [ 502.623143] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 502.623143] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 502.623143] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 502.623143] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 502.623143] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 502.623143] env[61648]: ERROR nova.compute.manager raise self.value [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 502.623143] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 502.623143] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 502.623143] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 502.623748] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 502.623748] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 502.623748] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. 
[ 502.623748] env[61648]: ERROR nova.compute.manager [ 502.623748] env[61648]: Traceback (most recent call last): [ 502.623748] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 502.623748] env[61648]: listener.cb(fileno) [ 502.623748] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 502.623748] env[61648]: result = function(*args, **kwargs) [ 502.623748] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 502.623748] env[61648]: return func(*args, **kwargs) [ 502.623748] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 502.623748] env[61648]: raise e [ 502.623748] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 502.623748] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 502.623748] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 502.623748] env[61648]: created_port_ids = self._update_ports_for_instance( [ 502.623748] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 502.623748] env[61648]: with excutils.save_and_reraise_exception(): [ 502.623748] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 502.623748] env[61648]: self.force_reraise() [ 502.623748] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 502.623748] env[61648]: raise self.value [ 502.623748] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 502.623748] env[61648]: updated_port = self._update_port( [ 502.623748] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 502.623748] env[61648]: _ensure_no_port_binding_failure(port) [ 502.623748] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 502.623748] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 502.624505] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. [ 502.624505] env[61648]: Removing descriptor: 14 [ 502.626343] env[61648]: ERROR nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. 
[ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Traceback (most recent call last): [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] yield resources [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.driver.spawn(context, instance, image_meta, [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] vm_ref = self.build_virtual_machine(instance, [ 502.626343] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] for vif in network_info: [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self._sync_wrapper(fn, *args, **kwargs) [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.wait() [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self[:] = self._gt.wait() [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self._exit_event.wait() [ 502.626703] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 502.626703] env[61648]: ERROR 
nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] result = hub.switch() [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self.greenlet.switch() [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] result = function(*args, **kwargs) [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return func(*args, **kwargs) [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise e [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] nwinfo = self.network_api.allocate_for_instance( [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] created_port_ids = self._update_ports_for_instance( [ 502.627014] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] with excutils.save_and_reraise_exception(): [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.force_reraise() [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise self.value [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] updated_port = self._update_port( [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 502.627327] 
env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] _ensure_no_port_binding_failure(port) [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise exception.PortBindingFailed(port_id=port['id']) [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. [ 502.627327] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] [ 502.627643] env[61648]: INFO nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Terminating instance [ 502.628698] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquiring lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 502.628858] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquired lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 502.629032] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 502.742303] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.838617] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquiring lock "368c44e8-756c-4b11-8a63-9f69e007769c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.838861] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "368c44e8-756c-4b11-8a63-9f69e007769c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 502.934122] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 502.995574] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 503.000894] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 503.222514] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453aec05-d489-4ce8-8141-6d4dd6520de7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.230903] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 503.232503] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa06abb-d9eb-4ed6-93d4-76f614823455 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.271270] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcc9614-ea55-4607-9f76-5bc3437d550f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.282507] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ab4cea-f4aa-44e9-9339-d15cda1f79c2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.300236] env[61648]: DEBUG nova.compute.provider_tree [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 503.342043] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 503.343614] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c 
tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 503.470185] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.528721] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.649108] env[61648]: DEBUG nova.compute.manager [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Received event network-changed-39a70d53-105a-4064-b125-3f7c619ed1ca {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 503.649304] env[61648]: DEBUG nova.compute.manager [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Refreshing instance network info cache due to event network-changed-39a70d53-105a-4064-b125-3f7c619ed1ca. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 503.649485] env[61648]: DEBUG oslo_concurrency.lockutils [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] Acquiring lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 503.767075] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "68e5fc5b-a843-4f49-a903-4ed145d63fd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.767310] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "68e5fc5b-a843-4f49-a903-4ed145d63fd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 503.806734] env[61648]: DEBUG nova.scheduler.client.report [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 503.844763] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Releasing lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 503.848308] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 503.848356] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 503.848983] env[61648]: DEBUG oslo_concurrency.lockutils [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] Acquired lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 503.849237] env[61648]: DEBUG nova.network.neutron [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Refreshing network info cache for port 39a70d53-105a-4064-b125-3f7c619ed1ca {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 503.852935] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59afc77a-2fd8-490c-9567-9c54c301d1d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.866231] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0528978-28bf-4320-9d2f-49a2c982f105 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 503.893220] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c741026a-6cd4-49c8-8604-f67cf7189c8a could not be found. 
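After the port binding failure the compute manager terminates the half-built instance: it re-acquires the refresh_cache lock, asks the vmwareapi driver to destroy the VM, finds nothing on the backend, logs the InstanceNotFound warning above, and carries on so that network deallocation and the claim abort can still run. A minimal sketch of that "treat a missing VM as already destroyed" cleanup pattern follows; find_vm_by_uuid, destroy_vm and the demo session are hypothetical names for illustration, not the driver's actual API:

    import logging

    LOG = logging.getLogger('destroy_sketch')

    class InstanceNotFound(Exception):
        # Stand-in for nova.exception.InstanceNotFound.
        pass

    def destroy_on_hypervisor(session, instance_uuid):
        # Best-effort destroy: a VM that is already gone is logged and skipped
        # so the caller can continue with network deallocation and claim abort.
        try:
            vm_ref = session.find_vm_by_uuid(instance_uuid)  # hypothetical lookup
            session.destroy_vm(vm_ref)                       # hypothetical call
        except InstanceNotFound:
            LOG.warning('Instance does not exist on backend: %s', instance_uuid)

    class _MissingVMSession:
        # Demo backend that never finds the VM, mimicking the log above.
        def find_vm_by_uuid(self, uuid):
            raise InstanceNotFound(uuid)

        def destroy_vm(self, vm_ref):
            raise AssertionError('unreached in this demo')

    if __name__ == '__main__':
        logging.basicConfig(level=logging.WARNING)
        destroy_on_hypervisor(_MissingVMSession(),
                              'c741026a-6cd4-49c8-8604-f67cf7189c8a')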
[ 503.893475] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 503.893957] env[61648]: INFO nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 503.894406] env[61648]: DEBUG oslo.service.loopingcall [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 503.894603] env[61648]: DEBUG nova.compute.manager [-] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 503.894603] env[61648]: DEBUG nova.network.neutron [-] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 503.902845] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.943649] env[61648]: DEBUG nova.network.neutron [-] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 504.014587] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 504.058353] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 504.058894] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 504.058894] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 504.059037] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 504.059219] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 504.059261] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 504.059460] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 504.062350] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 504.062614] 
env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 504.062848] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 504.063240] env[61648]: DEBUG nova.virt.hardware [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 504.066875] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19f39bc-29a5-4530-adc4-1f364cc272cf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.079092] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77cad1e-223d-4e28-8382-8cdfc85d0c0a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.106700] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 504.126029] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 504.126029] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93a86d9d-55f0-4e3c-8d78-ac5795673be3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.137483] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Created folder: OpenStack in parent group-v4. [ 504.137483] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating folder: Project (4aa5338369a94405bc330d0bc6301fbf). Parent ref: group-v285225. 
{{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 504.137697] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6eb058a-74b7-45fc-bd08-9b37383cebcd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.149542] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Created folder: Project (4aa5338369a94405bc330d0bc6301fbf) in parent group-v285225. [ 504.152615] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating folder: Instances. Parent ref: group-v285226. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 504.152883] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8414133e-96cc-4b37-ad9a-78390a73b785 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.164302] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Created folder: Instances in parent group-v285226. [ 504.165205] env[61648]: DEBUG oslo.service.loopingcall [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 504.169019] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 504.169019] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ee113c1-f0eb-44ba-9697-328548cf7656 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.185094] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 504.185094] env[61648]: value = "task-1336580" [ 504.185094] env[61648]: _type = "Task" [ 504.185094] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 504.197706] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336580, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 504.273516] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 504.310302] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 504.310517] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 504.313881] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.572s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.321017] env[61648]: INFO nova.compute.claims [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 504.410050] env[61648]: DEBUG nova.network.neutron [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 504.448253] env[61648]: DEBUG nova.network.neutron [-] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 504.590636] env[61648]: DEBUG nova.network.neutron [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 504.695953] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336580, 'name': CreateVM_Task, 'duration_secs': 0.371117} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 504.696507] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 504.697684] env[61648]: DEBUG oslo_vmware.service [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad39ba2c-c7f8-4c20-b7a8-f0cf065dd455 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.706018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 504.706018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 504.706018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 504.706018] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67868605-7129-456f-97e9-4f161ff69cc0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.710130] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 504.710130] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5237e6b2-b269-5689-582f-6031b56b739d" [ 504.710130] env[61648]: _type = "Task" [ 504.710130] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 504.721489] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5237e6b2-b269-5689-582f-6031b56b739d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 504.807363] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.825445] env[61648]: DEBUG nova.compute.utils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 504.833349] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 504.833349] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 504.954591] env[61648]: INFO nova.compute.manager [-] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Took 1.06 seconds to deallocate network for instance. [ 504.959768] env[61648]: DEBUG nova.compute.claims [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 504.960792] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.002113] env[61648]: DEBUG nova.policy [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a30d19c366d45859c62906992a3fbf9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '713d02f4209c48d3997afa0e1a8b792a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 505.101996] env[61648]: DEBUG oslo_concurrency.lockutils [req-2c251c05-12e5-49e8-8b17-96f7cb5fa4c8 req-9d07d31d-4496-486a-92bb-188f018360b0 service nova] Releasing lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 505.228158] 
env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 505.228158] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 505.228158] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 505.228158] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 505.228296] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 505.228296] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eec36778-8752-48ff-8595-9bae19133a89 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.237448] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 505.238126] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 505.239292] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ed41bb-210e-4ef6-95a4-25924adea097 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.249017] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd98c41-285f-4837-afd6-b5f1f83fc4f4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.253831] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 505.253831] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52120171-f547-3198-1384-e2ce31327af1" [ 505.253831] env[61648]: _type = "Task" [ 505.253831] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 505.262616] env[61648]: ERROR nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 505.262616] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 505.262616] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 505.262616] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 505.262616] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 505.262616] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 505.262616] env[61648]: ERROR nova.compute.manager raise self.value [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 505.262616] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 505.262616] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 505.262616] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 505.263184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 505.263184] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 505.263184] 
env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 505.263184] env[61648]: ERROR nova.compute.manager [ 505.263184] env[61648]: Traceback (most recent call last): [ 505.263184] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 505.263184] env[61648]: listener.cb(fileno) [ 505.263184] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 505.263184] env[61648]: result = function(*args, **kwargs) [ 505.263184] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 505.263184] env[61648]: return func(*args, **kwargs) [ 505.263184] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 505.263184] env[61648]: raise e [ 505.263184] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 505.263184] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 505.263184] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 505.263184] env[61648]: created_port_ids = self._update_ports_for_instance( [ 505.263184] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 505.263184] env[61648]: with excutils.save_and_reraise_exception(): [ 505.263184] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 505.263184] env[61648]: self.force_reraise() [ 505.263184] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 505.263184] env[61648]: raise self.value [ 505.263184] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 505.263184] env[61648]: updated_port = self._update_port( [ 505.263184] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 505.263184] env[61648]: _ensure_no_port_binding_failure(port) [ 505.263184] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 505.263184] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 505.265913] env[61648]: nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 505.265913] env[61648]: Removing descriptor: 16 [ 505.265913] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52120171-f547-3198-1384-e2ce31327af1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 505.265913] env[61648]: ERROR nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. 
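The PortBindingFailed traceback above recurs throughout this log, so a minimal, self-contained sketch of the check that raises it may help when reading the stacks. This is not Nova's actual implementation: the function and exception names are taken from the traceback, but the 'binding:vif_type' == 'binding_failed' condition and the simplified exception class are assumptions made for illustration only.

    # Minimal sketch (not Nova's code): reject a Neutron port whose binding
    # failed, mirroring the PortBindingFailed errors recorded in this log.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        # Assumption for illustration: Neutron reports a failed binding by
        # setting the port's 'binding:vif_type' attribute to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example input shaped like the failing port in the entries above.
    port = {'id': 'f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)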
[ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Traceback (most recent call last): [ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] yield resources [ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.driver.spawn(context, instance, image_meta, [ 505.265913] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] vm_ref = self.build_virtual_machine(instance, [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] vif_infos = vmwarevif.get_vif_info(self._session, [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] for vif in network_info: [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self._sync_wrapper(fn, *args, **kwargs) [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.wait() [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 505.266416] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self[:] = self._gt.wait() [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self._exit_event.wait() [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 505.266741] env[61648]: ERROR 
nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] result = hub.switch() [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self.greenlet.switch() [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] result = function(*args, **kwargs) [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return func(*args, **kwargs) [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise e [ 505.266741] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] nwinfo = self.network_api.allocate_for_instance( [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] created_port_ids = self._update_ports_for_instance( [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] with excutils.save_and_reraise_exception(): [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.force_reraise() [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise self.value [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] updated_port = self._update_port( [ 505.267058] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 505.267058] 
env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] _ensure_no_port_binding_failure(port) [ 505.267520] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 505.267520] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise exception.PortBindingFailed(port_id=port['id']) [ 505.267520] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 505.267520] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] [ 505.267520] env[61648]: INFO nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Terminating instance [ 505.267520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquiring lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 505.267520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquired lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 505.267792] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 505.334242] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 505.400317] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquiring lock "9a03a74b-7fad-4338-ae6f-82c493cd44e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.400317] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "9a03a74b-7fad-4338-ae6f-82c493cd44e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.468030] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_power_states {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 505.586677] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4f44a5-3eab-4505-8498-dfd8c74d1e07 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.593717] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8de98a-4036-46a9-87ef-ff17f455c036 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.633029] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6443d8b-08e3-4fd9-b7ed-1105e8333cc0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.640202] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80fdce2-e6eb-4c85-8e0a-cd5ad8e0773f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.659890] env[61648]: DEBUG nova.compute.provider_tree [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 505.762591] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Preparing fetch location {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 505.763195] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating directory with path [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 505.765670] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a178c396-93d2-44b5-ac05-200ff17a535a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.796416] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Created directory with path [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 505.797016] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Fetch image to [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 505.797016] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Downloading image file data a3243eb3-32d0-4887-afc7-2030d2340206 to [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk on the data store datastore2 {{(pid=61648) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 505.797625] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3619bc6d-7c57-4190-ad17-d35ced9a3726 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.813706] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ced3d3-c589-46f6-8666-ac2cb4e11864 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.828518] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1e44f2-96d5-4603-acee-5866923b31bb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.871216] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d607c81-2dac-4af3-9d99-7e6afbdadddd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.879168] env[61648]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cf9c1920-48c2-4d02-9025-badcbc59bbf0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.898744] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 505.913467] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Successfully created port: 6f7c5f96-3bb5-4285-bc03-5d1512385cfa {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 505.919168] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 505.974803] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Getting list of instances from cluster (obj){ [ 505.974803] env[61648]: value = "domain-c8" [ 505.974803] env[61648]: _type = "ClusterComputeResource" [ 505.974803] env[61648]: } {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 505.979441] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36375d8c-eea9-4f70-b43a-5a2948a33eb9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.988696] env[61648]: DEBUG nova.virt.vmwareapi.images [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Downloading image file data a3243eb3-32d0-4887-afc7-2030d2340206 to the data store datastore2 {{(pid=61648) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 505.999518] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Got total of 1 instances {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 505.999950] env[61648]: WARNING nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] While synchronizing instance power states, found 5 instances in the database and 1 instances on the hypervisor. 
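The "Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s" and "released ... held N.NNNs" triplets surrounding the power-state sync above are produced by oslo.concurrency's lockutils. A minimal usage sketch follows, assuming oslo.concurrency is installed; the lock names and the guarded work are illustrative placeholders, not the ones Nova uses.

    # Minimal sketch of the oslo.concurrency locking pattern seen in this log.
    # Requires the oslo.concurrency package (pip install oslo.concurrency).
    from oslo_concurrency import lockutils

    # Context-manager form: with debug logging configured, this logs
    # acquire/release messages similar to the lockutils lines above.
    with lockutils.lock('refresh_cache-example-instance-uuid'):
        pass  # critical section, e.g. refreshing an instance's network info cache

    # Decorator form, serializing callers on a named lock such as the
    # "compute_resources" lock seen in the resource-tracker entries.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # work guarded by the lock

    claim_resources()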
[ 506.000271] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Triggering sync for uuid c741026a-6cd4-49c8-8604-f67cf7189c8a {{(pid=61648) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 506.000584] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Triggering sync for uuid 199ab8b8-15d8-47b3-8e72-d3995047cb45 {{(pid=61648) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 506.000786] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Triggering sync for uuid 767c2c81-2508-4dcd-97d7-28726c2c6d31 {{(pid=61648) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 506.001170] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Triggering sync for uuid c72ac06b-b114-4c5e-af9f-fd7dfc880a34 {{(pid=61648) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 506.001385] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Triggering sync for uuid 92d3ea1d-8a72-4f4b-87be-70367170d933 {{(pid=61648) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 506.001852] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.002240] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.002454] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.002645] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.002842] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "92d3ea1d-8a72-4f4b-87be-70367170d933" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.003950] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 506.004546] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None 
req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Getting list of instances from cluster (obj){ [ 506.004546] env[61648]: value = "domain-c8" [ 506.004546] env[61648]: _type = "ClusterComputeResource" [ 506.004546] env[61648]: } {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 506.006784] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1877791-fad6-41d7-9778-bf45fa8e7d36 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.018227] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Got total of 1 instances {{(pid=61648) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 506.090277] env[61648]: DEBUG oslo_vmware.rw_handles [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61648) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 506.160479] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 506.173136] env[61648]: DEBUG nova.scheduler.client.report [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 506.372377] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 506.407949] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 506.408242] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 506.409169] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 506.410115] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 506.410307] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 506.410469] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 506.410705] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 506.410840] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 506.410985] 
env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 506.411179] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 506.411501] env[61648]: DEBUG nova.virt.hardware [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 506.413311] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68065a4-fad4-4688-adfd-8e20c12a39e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.436115] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e7b55f-45f3-43bf-bcc3-adcdf6f5975e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.455841] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.497702] env[61648]: DEBUG nova.compute.manager [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Received event network-changed-f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 506.497971] env[61648]: DEBUG nova.compute.manager [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Refreshing instance network info cache due to event network-changed-f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 506.500081] env[61648]: DEBUG oslo_concurrency.lockutils [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] Acquiring lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 506.663834] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Releasing lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 506.664375] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 506.664627] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 506.664978] env[61648]: DEBUG oslo_concurrency.lockutils [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] Acquired lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 506.665879] env[61648]: DEBUG nova.network.neutron [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Refreshing network info cache for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 506.667279] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64399692-7b37-4fd9-be63-e1b9bf5f9047 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.681617] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31359bc8-86e6-4694-810a-947a43450388 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.695985] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 506.696899] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 506.702424] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.232s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 506.704052] env[61648]: INFO nova.compute.claims [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 506.719755] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 199ab8b8-15d8-47b3-8e72-d3995047cb45 could not be found. [ 506.720142] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 506.721026] env[61648]: INFO nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Took 0.06 seconds to destroy the instance on the hypervisor. [ 506.721026] env[61648]: DEBUG oslo.service.loopingcall [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 506.723397] env[61648]: DEBUG nova.compute.manager [-] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 506.723397] env[61648]: DEBUG nova.network.neutron [-] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 506.747463] env[61648]: DEBUG nova.network.neutron [-] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 506.783622] env[61648]: DEBUG oslo_vmware.rw_handles [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Completed reading data from the image iterator. 
{{(pid=61648) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 506.783622] env[61648]: DEBUG oslo_vmware.rw_handles [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61648) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 506.888769] env[61648]: DEBUG nova.compute.manager [req-61a7ee08-c00b-446b-bdd8-34a3c9faad8d req-b8dd1add-5305-4146-a469-3ad2da74d944 service nova] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Received event network-vif-deleted-39a70d53-105a-4064-b125-3f7c619ed1ca {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 506.924633] env[61648]: DEBUG nova.virt.vmwareapi.images [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Downloaded image file data a3243eb3-32d0-4887-afc7-2030d2340206 to vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk on the data store datastore2 {{(pid=61648) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 506.926929] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Caching image {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 506.927283] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Copying Virtual Disk [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk to [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 506.927387] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6ea1dde-3240-4ac3-839d-ea304c3f13af {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.937607] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 506.937607] env[61648]: value = "task-1336581" [ 506.937607] env[61648]: _type = "Task" [ 506.937607] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 506.947508] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336581, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 507.203743] env[61648]: DEBUG nova.compute.utils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 507.204854] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 507.205255] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 507.212240] env[61648]: DEBUG nova.network.neutron [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 507.253115] env[61648]: DEBUG nova.network.neutron [-] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 507.295694] env[61648]: DEBUG nova.policy [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8210a3ea4f35489bbea80e490f7a00c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91de5af21aba4db98efb62b0675a9c92', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 507.454423] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336581, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 507.545756] env[61648]: DEBUG nova.network.neutron [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 507.713475] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 507.757292] env[61648]: INFO nova.compute.manager [-] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Took 1.03 seconds to deallocate network for instance. [ 507.768556] env[61648]: DEBUG nova.compute.claims [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 507.768556] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.949820] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732332} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 507.950990] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Copied Virtual Disk [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk to [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 507.951400] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleting the datastore file [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206/tmp-sparse.vmdk {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 507.952036] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24a3a8cf-1587-4910-b170-64fde8fa1ed5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.960558] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 507.960558] env[61648]: value = "task-1336582" [ 507.960558] env[61648]: _type = "Task" [ 507.960558] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 507.981331] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336582, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 507.986508] env[61648]: ERROR nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. [ 507.986508] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 507.986508] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 507.986508] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 507.986508] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 507.986508] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 507.986508] env[61648]: ERROR nova.compute.manager raise self.value [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 507.986508] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 507.986508] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 507.986508] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 507.987717] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 507.987717] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 507.987717] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. 
[ 507.987717] env[61648]: ERROR nova.compute.manager [ 507.987717] env[61648]: Traceback (most recent call last): [ 507.987717] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 507.987717] env[61648]: listener.cb(fileno) [ 507.987717] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 507.987717] env[61648]: result = function(*args, **kwargs) [ 507.987717] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 507.987717] env[61648]: return func(*args, **kwargs) [ 507.987717] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 507.987717] env[61648]: raise e [ 507.987717] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 507.987717] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 507.987717] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 507.987717] env[61648]: created_port_ids = self._update_ports_for_instance( [ 507.987717] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 507.987717] env[61648]: with excutils.save_and_reraise_exception(): [ 507.987717] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 507.987717] env[61648]: self.force_reraise() [ 507.987717] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 507.987717] env[61648]: raise self.value [ 507.987717] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 507.987717] env[61648]: updated_port = self._update_port( [ 507.987717] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 507.987717] env[61648]: _ensure_no_port_binding_failure(port) [ 507.987717] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 507.987717] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 507.988807] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. [ 507.988807] env[61648]: Removing descriptor: 14 [ 507.988807] env[61648]: ERROR nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. 
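Both port-binding tracebacks in this section pass through oslo.utils' save_and_reraise_exception context manager, which is why force_reraise() and "raise self.value" appear in every stack. A minimal sketch of that pattern is shown below, assuming oslo.utils is installed; the function names and the cleanup step are illustrative, not Nova's code.

    # Minimal sketch of the excutils.save_and_reraise_exception pattern from the
    # tracebacks above: run cleanup on failure, then re-raise the original
    # exception with its traceback intact.
    from oslo_utils import excutils

    def update_port_or_cleanup(update_port):
        try:
            return update_port()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; when the block exits, the saved exception
                # is re-raised (force_reraise), as seen in the log's stacks.
                print('cleaning up partially-created ports')

    def failing_update_port():
        raise RuntimeError('binding failed')

    try:
        update_port_or_cleanup(failing_update_port)
    except RuntimeError as exc:
        print('re-raised:', exc)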
[ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Traceback (most recent call last): [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] yield resources [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.driver.spawn(context, instance, image_meta, [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 507.988807] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] vm_ref = self.build_virtual_machine(instance, [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] for vif in network_info: [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self._sync_wrapper(fn, *args, **kwargs) [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.wait() [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self[:] = self._gt.wait() [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self._exit_event.wait() [ 507.989119] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 507.989450] env[61648]: ERROR 
nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] result = hub.switch() [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self.greenlet.switch() [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] result = function(*args, **kwargs) [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return func(*args, **kwargs) [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise e [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] nwinfo = self.network_api.allocate_for_instance( [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 507.989450] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] created_port_ids = self._update_ports_for_instance( [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] with excutils.save_and_reraise_exception(): [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.force_reraise() [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise self.value [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] updated_port = self._update_port( [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 507.989780] 
env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] _ensure_no_port_binding_failure(port) [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 507.989780] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise exception.PortBindingFailed(port_id=port['id']) [ 507.990147] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. [ 507.990147] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] [ 507.990147] env[61648]: INFO nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Terminating instance [ 507.990147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquiring lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 507.990147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquired lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 507.990283] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 508.049595] env[61648]: DEBUG oslo_concurrency.lockutils [req-e5568427-5c0b-475a-8ee6-52e80303e634 req-98fd69aa-c074-4c18-8810-c8f7c5f824d8 service nova] Releasing lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 508.077566] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c42e048-8b3c-4bf0-92d9-af25d1d9d15f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.088873] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea845368-0b8f-463d-a6a6-9320e1b0560e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.129452] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e35cd4-96ac-43d8-a082-d9fd9d0c11ff {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.138203] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917530a9-f15c-4b83-a0c2-4149f2b5343f 
{{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.158009] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 508.477258] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024523} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 508.477258] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 508.477258] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Moving file from [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761/a3243eb3-32d0-4887-afc7-2030d2340206 to [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206. {{(pid=61648) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 508.477408] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-eb5e65bf-9a5a-469c-b9bb-7f07e32c97de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.485680] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 508.485680] env[61648]: value = "task-1336583" [ 508.485680] env[61648]: _type = "Task" [ 508.485680] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 508.499437] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336583, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 508.534083] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 508.628051] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Successfully created port: ebbbae79-2dd1-42c0-9f07-56cf7947d131 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 508.662026] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 508.727685] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 508.738191] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 508.776297] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 508.776581] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 508.776751] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd 
tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 508.776928] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 508.778039] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 508.778250] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 508.778471] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 508.778685] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 508.778823] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 508.778945] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 508.779129] env[61648]: DEBUG nova.virt.hardware [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 508.780017] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467c26c1-0514-49f9-98a1-a7ac8b5287e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.788754] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0302cb-e334-4a5e-a03f-4996bd5832d4 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.998028] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336583, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.201633} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 508.998028] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] File moved {{(pid=61648) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 508.998028] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Cleaning up location [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 508.998028] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleting the datastore file [datastore2] vmware_temp/71801259-1d4f-4b7d-9499-d182f1d6a761 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 508.998028] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6627f784-828f-4aad-a356-b0ec7d038498 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.006702] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 509.006702] env[61648]: value = "task-1336584" [ 509.006702] env[61648]: _type = "Task" [ 509.006702] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.016254] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.167625] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 509.167886] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 509.172891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.643s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.174393] env[61648]: INFO nova.compute.claims [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 509.247073] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Releasing lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 509.247073] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 509.247073] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 509.247073] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-222c8b76-a5aa-4c33-8a15-5d1c83f264d5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.257940] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2645aa-5150-4e7f-b98f-67144493f30c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.285261] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c72ac06b-b114-4c5e-af9f-fd7dfc880a34 could not be found. [ 509.285633] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 509.285714] env[61648]: INFO nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 509.285906] env[61648]: DEBUG oslo.service.loopingcall [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 509.286174] env[61648]: DEBUG nova.compute.manager [-] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 509.286345] env[61648]: DEBUG nova.network.neutron [-] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 509.366925] env[61648]: DEBUG nova.network.neutron [-] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 509.377473] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.378048] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.379432] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 509.379432] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 509.521159] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022853} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 509.521159] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 509.521159] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1685c448-eaab-4040-a6d0-c3dc50d7ff6e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.530308] env[61648]: DEBUG nova.compute.manager [req-877671ae-a0bc-4c68-9a99-09b016c715ea req-35f49a60-9c80-4865-a0d2-b0551ed2a2be service nova] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Received event network-vif-deleted-f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 509.531215] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 509.531215] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52904bce-1fa1-5bae-7b44-77303d14b960" [ 509.531215] env[61648]: _type = "Task" [ 509.531215] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.543847] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52904bce-1fa1-5bae-7b44-77303d14b960, 'name': SearchDatastore_Task, 'duration_secs': 0.00915} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 509.544316] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 509.544724] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 767c2c81-2508-4dcd-97d7-28726c2c6d31/767c2c81-2508-4dcd-97d7-28726c2c6d31.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 509.545294] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1051a80f-ee58-447c-9f85-bfc42c04fc3e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.553214] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 509.553214] env[61648]: value = "task-1336585" [ 509.553214] env[61648]: _type = "Task" [ 509.553214] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.559813] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.623029] env[61648]: DEBUG nova.compute.manager [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Received event network-changed-6f7c5f96-3bb5-4285-bc03-5d1512385cfa {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 509.623029] env[61648]: DEBUG nova.compute.manager [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Refreshing instance network info cache due to event network-changed-6f7c5f96-3bb5-4285-bc03-5d1512385cfa. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 509.623476] env[61648]: DEBUG oslo_concurrency.lockutils [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] Acquiring lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 509.623476] env[61648]: DEBUG oslo_concurrency.lockutils [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] Acquired lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 509.623737] env[61648]: DEBUG nova.network.neutron [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Refreshing network info cache for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 509.681342] env[61648]: DEBUG nova.compute.utils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 509.684218] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 509.684968] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 509.868910] env[61648]: DEBUG nova.network.neutron [-] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 509.883869] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.884121] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.884317] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Skipping network cache update for instance because it is Building. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.884427] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.884683] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.885139] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.885139] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 509.885346] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 509.886654] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.887028] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.887611] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.887878] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.888144] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.888421] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 509.888697] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 509.888846] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 510.051785] env[61648]: DEBUG nova.policy [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1679b8e397bd4dffab66422c61d72078', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61cc7ac999324335bf3a0bb3d84f7f4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 510.066360] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336585, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 510.173930] env[61648]: DEBUG nova.network.neutron [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 510.184327] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 510.376753] env[61648]: INFO nova.compute.manager [-] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Took 1.09 seconds to deallocate network for instance. 
[ 510.387024] env[61648]: DEBUG nova.compute.claims [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 510.387024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.392218] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.455819] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45494113-1874-4e74-babf-b6e5149b9847 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.465523] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ceb0cd-9583-4d50-8844-6a07124b9d4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.501843] env[61648]: DEBUG nova.network.neutron [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 510.504962] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510663ae-9805-44c4-8729-177bc22753fd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.514035] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64825ff-170d-4209-847c-237fa3beabf2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.530508] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 510.566656] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546123} completed 
successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 510.566656] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 767c2c81-2508-4dcd-97d7-28726c2c6d31/767c2c81-2508-4dcd-97d7-28726c2c6d31.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 510.566656] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 510.566656] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32eab3d0-311c-4208-a9a2-dd9f59d79285 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.573807] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 510.573807] env[61648]: value = "task-1336586" [ 510.573807] env[61648]: _type = "Task" [ 510.573807] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 510.584631] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 511.009929] env[61648]: DEBUG oslo_concurrency.lockutils [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] Releasing lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 511.009929] env[61648]: DEBUG nova.compute.manager [req-ff9fff33-3678-463b-8656-3d0872e90938 req-a3fad9f6-f432-475e-9991-ce3ca7b7a5ad service nova] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Received event network-vif-deleted-6f7c5f96-3bb5-4285-bc03-5d1512385cfa {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 511.053376] env[61648]: ERROR nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [req-ff68fcb7-d803-45b1-98be-b672c35b5c9e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ff68fcb7-d803-45b1-98be-b672c35b5c9e"}]} [ 511.072714] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 511.085045] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070179} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 511.085045] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 511.086025] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2605d9f-979d-4cc7-b6a0-0a14be8fcdc1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.090671] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 511.090885] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 511.120780] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 
767c2c81-2508-4dcd-97d7-28726c2c6d31/767c2c81-2508-4dcd-97d7-28726c2c6d31.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 511.121993] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 511.124095] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1516a84f-31ac-44e7-933d-45fa3680a5d7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.143808] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 511.143808] env[61648]: value = "task-1336587" [ 511.143808] env[61648]: _type = "Task" [ 511.143808] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 511.155414] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336587, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 511.180160] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 511.194872] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 511.231977] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 511.232481] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 511.234164] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 511.234164] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 511.234164] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 511.234164] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 511.234164] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 511.234373] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 511.234863] env[61648]: DEBUG nova.virt.hardware [None 
req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 511.234863] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 511.234863] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 511.235706] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1ce0c0-a8fd-4a1d-8050-99b5865e8029 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.249989] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49aade4d-f381-4140-a5b9-5b0ad8b4e521 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.387283] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7987d9-7b72-4608-908b-02faba39e1d4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.396675] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec27707-df28-458b-bf95-9d3aef53f889 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.433091] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10560d5-c7fc-4ca1-aed7-b6c3d27e40df {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.440734] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983f0d35-d69e-4746-bbda-6f79d3694c1b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.457336] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 511.620493] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 
4deb5eaa-e8e2-41cc-aef9-722235e69b95] Successfully created port: d02883f7-01b1-42b3-baf7-99d9d0e95ede {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 511.660472] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336587, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 512.005450] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 14 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 512.005555] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 14 to 15 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 512.005687] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 512.159980] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336587, 'name': ReconfigVM_Task, 'duration_secs': 0.568127} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 512.162644] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 767c2c81-2508-4dcd-97d7-28726c2c6d31/767c2c81-2508-4dcd-97d7-28726c2c6d31.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 512.163379] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18954297-9b20-4128-8e86-9f08b297c733 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.170143] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 512.170143] env[61648]: value = "task-1336588" [ 512.170143] env[61648]: _type = "Task" [ 512.170143] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 512.180618] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336588, 'name': Rename_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 512.514950] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.344s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.515508] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 512.525874] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.618s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.527593] env[61648]: INFO nova.compute.claims [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.690940] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336588, 'name': Rename_Task, 'duration_secs': 0.140991} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 512.691257] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 512.691498] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0da26f63-3e72-4d85-a158-9cf3d2817750 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.697925] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 512.697925] env[61648]: value = "task-1336589" [ 512.697925] env[61648]: _type = "Task" [ 512.697925] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 512.714122] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336589, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.033591] env[61648]: DEBUG nova.compute.utils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 513.035529] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 513.038368] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 513.209118] env[61648]: DEBUG oslo_vmware.api [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336589, 'name': PowerOnVM_Task, 'duration_secs': 0.424729} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 513.209403] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 513.209595] env[61648]: INFO nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Took 9.19 seconds to spawn the instance on the hypervisor. [ 513.209849] env[61648]: DEBUG nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 513.214023] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9211e434-818a-4142-9587-f9bc9742dc87 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.290300] env[61648]: DEBUG nova.policy [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1679b8e397bd4dffab66422c61d72078', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61cc7ac999324335bf3a0bb3d84f7f4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 513.542494] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 513.708897] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbc7aa6-91c0-4b08-9ba8-490a56a378b3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.715247] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ad7aec-c5c2-44a1-a70f-34913e3392a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.751057] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8473b399-5423-4d41-9454-745dde60fdad {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.756009] env[61648]: INFO nova.compute.manager [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Took 14.58 seconds to build instance. [ 513.761413] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ea019f-7b4f-41d6-bdb6-a4835937dbc0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.776346] env[61648]: DEBUG nova.compute.provider_tree [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 514.260601] env[61648]: DEBUG oslo_concurrency.lockutils [None req-367852b6-5a42-469b-b630-32d90f565ac2 tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.096s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.260899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.258s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.261073] env[61648]: INFO nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] During sync_power_state the instance has a pending task (spawning). Skip. 
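The 409 earlier in this stream (code placement.concurrent_update) means the resource provider generation sent with the inventory update was stale, which is why the report client immediately refreshes inventories for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 and the later set_inventory_for_provider call succeeds, bumping the generation from 14 to 15. A minimal sketch of that optimistic-concurrency retry against the Placement REST API follows, assuming an already-authenticated requests.Session; the endpoint, token handling and inventory payload are illustrative placeholders, not Nova's report-client code.

# Illustrative sketch (not Nova's report client): retry an inventory PUT after a
# "placement.concurrent_update" 409 by re-reading the resource provider generation.
# Endpoint, token and payload are placeholder assumptions for the example.
import requests

PLACEMENT = "http://placement.example:8778"        # placeholder endpoint
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",          # placeholder auth token
           "OpenStack-API-Version": "placement 1.26"}
PROVIDER = "1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0"  # provider uuid seen in the log

def put_inventories(session, inventories, retries=3):
    """PUT the inventory dict, refreshing the generation on 409 conflicts."""
    url = f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories"
    for _ in range(retries):
        # The generation is an optimistic-concurrency token: read the current one...
        current = session.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        # ...and send it back; Placement rejects the write with HTTP 409
        # (placement.concurrent_update) if another writer bumped it in between.
        resp = session.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
    raise RuntimeError("inventory update still conflicting after %d attempts" % retries)

if __name__ == "__main__":
    inv = {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1,
                    "max_unit": 16, "step_size": 1, "allocation_ratio": 4.0}}
    put_inventories(requests.Session(), inv)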
[ 514.261246] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.281414] env[61648]: DEBUG nova.scheduler.client.report [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 514.476859] env[61648]: ERROR nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 514.476859] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 514.476859] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 514.476859] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 514.476859] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 514.476859] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 514.476859] env[61648]: ERROR nova.compute.manager raise self.value [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 514.476859] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 514.476859] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 514.476859] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 514.477391] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 514.477391] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 514.477391] 
env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 514.477391] env[61648]: ERROR nova.compute.manager [ 514.477391] env[61648]: Traceback (most recent call last): [ 514.477391] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 514.477391] env[61648]: listener.cb(fileno) [ 514.477391] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 514.477391] env[61648]: result = function(*args, **kwargs) [ 514.477391] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 514.477391] env[61648]: return func(*args, **kwargs) [ 514.477391] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 514.477391] env[61648]: raise e [ 514.477391] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 514.477391] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 514.477391] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 514.477391] env[61648]: created_port_ids = self._update_ports_for_instance( [ 514.477391] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 514.477391] env[61648]: with excutils.save_and_reraise_exception(): [ 514.477391] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 514.477391] env[61648]: self.force_reraise() [ 514.477391] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 514.477391] env[61648]: raise self.value [ 514.477391] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 514.477391] env[61648]: updated_port = self._update_port( [ 514.477391] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 514.477391] env[61648]: _ensure_no_port_binding_failure(port) [ 514.477391] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 514.477391] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 514.478192] env[61648]: nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 514.478192] env[61648]: Removing descriptor: 18 [ 514.478192] env[61648]: ERROR nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. 
[ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Traceback (most recent call last): [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] yield resources [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.driver.spawn(context, instance, image_meta, [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self._vmops.spawn(context, instance, image_meta, injected_files, [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 514.478192] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] vm_ref = self.build_virtual_machine(instance, [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] vif_infos = vmwarevif.get_vif_info(self._session, [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] for vif in network_info: [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self._sync_wrapper(fn, *args, **kwargs) [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.wait() [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self[:] = self._gt.wait() [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self._exit_event.wait() [ 514.478515] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 514.478834] env[61648]: ERROR 
nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] result = hub.switch() [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self.greenlet.switch() [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] result = function(*args, **kwargs) [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return func(*args, **kwargs) [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise e [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] nwinfo = self.network_api.allocate_for_instance( [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 514.478834] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] created_port_ids = self._update_ports_for_instance( [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] with excutils.save_and_reraise_exception(): [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.force_reraise() [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise self.value [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] updated_port = self._update_port( [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 514.480213] 
env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] _ensure_no_port_binding_failure(port) [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 514.480213] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise exception.PortBindingFailed(port_id=port['id']) [ 514.480806] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 514.480806] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] [ 514.480806] env[61648]: INFO nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Terminating instance [ 514.480898] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 514.481035] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquired lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 514.481166] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 514.560481] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 514.600042] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 514.601523] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 514.601740] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 514.601968] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 514.602482] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 514.602717] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 514.603266] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 514.603484] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 514.603692] env[61648]: DEBUG nova.virt.hardware [None 
req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 514.607598] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 514.607598] env[61648]: DEBUG nova.virt.hardware [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 514.607598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f36ef9-5572-44eb-81a3-033dd765e0f3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.621327] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532ccba1-7b51-4adf-812a-099d96fc4a0c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.794951] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.795813] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 514.800591] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.991s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.802350] env[61648]: INFO nova.compute.claims [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.823874] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Successfully created port: 12f09195-5216-432e-bc0f-276a8216a482 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 514.847797] env[61648]: DEBUG nova.compute.manager [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Received event network-changed-ebbbae79-2dd1-42c0-9f07-56cf7947d131 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 514.847797] env[61648]: DEBUG nova.compute.manager [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Refreshing instance network info cache due to event network-changed-ebbbae79-2dd1-42c0-9f07-56cf7947d131. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 514.850484] env[61648]: DEBUG oslo_concurrency.lockutils [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] Acquiring lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.061515] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 515.312745] env[61648]: DEBUG nova.compute.utils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 515.317276] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 515.317276] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 515.353091] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.571761] env[61648]: DEBUG nova.policy [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2061876f50214ddaabe2f2981c6e8189', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd66ce368718a4828b11dcc7c86802287', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 515.823501] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 515.856458] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Releasing lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 515.856458] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 515.856615] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 515.856886] env[61648]: DEBUG oslo_concurrency.lockutils [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] Acquired lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.857054] env[61648]: DEBUG nova.network.neutron [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Refreshing network info cache for port ebbbae79-2dd1-42c0-9f07-56cf7947d131 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 515.858244] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad353f01-2910-4500-a7cc-feb2a40b7bb5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.875079] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a377a1-1c9f-478a-ba7f-2c60fc9a818d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.907073] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 92d3ea1d-8a72-4f4b-87be-70367170d933 could not be found. [ 515.907073] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 515.914134] env[61648]: INFO nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Took 0.05 seconds to destroy the instance on the hypervisor. [ 515.914134] env[61648]: DEBUG oslo.service.loopingcall [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 515.914134] env[61648]: DEBUG nova.compute.manager [-] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 515.914134] env[61648]: DEBUG nova.network.neutron [-] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 516.018132] env[61648]: DEBUG nova.network.neutron [-] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 516.119691] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3f2c0d-4b67-4870-9bca-cd18ee330edd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.130626] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcadd8ba-1956-44ee-8de4-7cf305768f94 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.167778] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97920617-1a38-4bab-bf57-edf9a195d81e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.176922] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3d74f8-e67f-4ffc-a972-991507570a5a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.198388] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 516.521998] env[61648]: DEBUG nova.network.neutron [-] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.533290] env[61648]: DEBUG nova.network.neutron [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 516.704282] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 516.794348] env[61648]: DEBUG nova.network.neutron [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.838465] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 516.869755] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 516.870039] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 516.870212] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 516.870425] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Flavor pref 
0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 516.870885] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 516.870885] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 516.871435] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 516.871710] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 516.872019] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 516.872214] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 516.872442] env[61648]: DEBUG nova.virt.hardware [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 516.873479] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041c9f59-9132-4d25-b51d-be4fc40845c5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.884125] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bc944d-3c70-4bba-8dfc-3db295ccd106 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.967632] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Successfully created port: 00d65201-eb20-4b59-ad50-d22d8c516380 {{(pid=61648) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 517.007357] env[61648]: DEBUG nova.compute.manager [None req-8b42fa86-e2db-4038-94cd-5b552bb2938c tempest-ServerDiagnosticsV248Test-503453851 tempest-ServerDiagnosticsV248Test-503453851-project-admin] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 517.011598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db493647-01b4-41c3-8732-4c37f2ba19e2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.023643] env[61648]: INFO nova.compute.manager [None req-8b42fa86-e2db-4038-94cd-5b552bb2938c tempest-ServerDiagnosticsV248Test-503453851 tempest-ServerDiagnosticsV248Test-503453851-project-admin] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Retrieving diagnostics [ 517.024566] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cf16ef-44e8-4b19-8072-5df91c2e1cd4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.031732] env[61648]: INFO nova.compute.manager [-] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Took 1.12 seconds to deallocate network for instance. [ 517.034516] env[61648]: DEBUG nova.compute.claims [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 517.034749] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.076107] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. 
[ 517.076107] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 517.076107] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.076107] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.076107] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.076107] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.076107] env[61648]: ERROR nova.compute.manager raise self.value [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.076107] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 517.076107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.076107] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 517.077178] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.077178] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 517.077178] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. 
[ 517.077178] env[61648]: ERROR nova.compute.manager [ 517.077178] env[61648]: Traceback (most recent call last): [ 517.077178] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 517.077178] env[61648]: listener.cb(fileno) [ 517.077178] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.077178] env[61648]: result = function(*args, **kwargs) [ 517.077178] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 517.077178] env[61648]: return func(*args, **kwargs) [ 517.077178] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 517.077178] env[61648]: raise e [ 517.077178] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 517.077178] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 517.077178] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.077178] env[61648]: created_port_ids = self._update_ports_for_instance( [ 517.077178] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.077178] env[61648]: with excutils.save_and_reraise_exception(): [ 517.077178] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.077178] env[61648]: self.force_reraise() [ 517.077178] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.077178] env[61648]: raise self.value [ 517.077178] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.077178] env[61648]: updated_port = self._update_port( [ 517.077178] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.077178] env[61648]: _ensure_no_port_binding_failure(port) [ 517.077178] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.077178] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 517.077968] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. [ 517.077968] env[61648]: Removing descriptor: 14 [ 517.077968] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. 
[ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Traceback (most recent call last): [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] yield resources [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.driver.spawn(context, instance, image_meta, [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 517.077968] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] vm_ref = self.build_virtual_machine(instance, [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] vif_infos = vmwarevif.get_vif_info(self._session, [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] for vif in network_info: [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self._sync_wrapper(fn, *args, **kwargs) [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.wait() [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self[:] = self._gt.wait() [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self._exit_event.wait() [ 517.078332] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 517.078730] env[61648]: ERROR 
nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] result = hub.switch() [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self.greenlet.switch() [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] result = function(*args, **kwargs) [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return func(*args, **kwargs) [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise e [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] nwinfo = self.network_api.allocate_for_instance( [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.078730] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] created_port_ids = self._update_ports_for_instance( [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] with excutils.save_and_reraise_exception(): [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.force_reraise() [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise self.value [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] updated_port = self._update_port( [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.079086] 
env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] _ensure_no_port_binding_failure(port) [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.079086] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise exception.PortBindingFailed(port_id=port['id']) [ 517.079455] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. [ 517.079455] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] [ 517.079455] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Terminating instance [ 517.080916] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 517.080916] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 517.081040] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 517.208797] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 517.209631] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 517.213329] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.253s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.299159] env[61648]: DEBUG oslo_concurrency.lockutils [req-f828acbe-7d97-4908-a38a-3ef4bb67f84b req-24aee4cd-78d2-4d5e-828f-b16dba6e8ad4 service nova] Releasing lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.638203] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 517.719446] env[61648]: DEBUG nova.compute.utils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 517.726748] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 517.726936] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 517.989754] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abf2ff0-b35e-4012-8c6f-4c8f6aee615f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.004442] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3910094e-37eb-4ac2-a176-6990cfd28f7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.062617] env[61648]: DEBUG nova.policy [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '364b47168dc544dc9f24bf99abd1bf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9fb970ae81348d9b75bc67c353bc8db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 518.066043] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2d7533-d463-4216-9193-a57cd6c43d22 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.076523] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9ede42-d233-4d1a-a512-3d00e8a9baa8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.094764] env[61648]: DEBUG nova.compute.provider_tree [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.124472] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.228177] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 518.423492] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "d6bbb34d-86a7-4686-ba60-0e418623e9fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.423668] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "d6bbb34d-86a7-4686-ba60-0e418623e9fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.602019] env[61648]: DEBUG nova.scheduler.client.report [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.628422] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 518.629236] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 518.629236] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 518.629390] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd59d048-f309-4b27-8170-a865e05fb81d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.639819] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf4f569-841a-4a1c-a763-8bf2d5fbd278 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.666298] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4deb5eaa-e8e2-41cc-aef9-722235e69b95 could not be found. [ 518.666579] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 518.666760] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Took 0.04 seconds to destroy the instance on the hypervisor. [ 518.667040] env[61648]: DEBUG oslo.service.loopingcall [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 518.667359] env[61648]: DEBUG nova.compute.manager [-] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 518.667615] env[61648]: DEBUG nova.network.neutron [-] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 518.749947] env[61648]: DEBUG nova.network.neutron [-] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.928117] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.108178] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.895s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.108687] env[61648]: ERROR nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Traceback (most recent call last): [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.driver.spawn(context, instance, image_meta, [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] vm_ref = self.build_virtual_machine(instance, [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 519.108687] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] for vif in network_info: [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self._sync_wrapper(fn, *args, **kwargs) [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.wait() [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 519.109358] env[61648]: ERROR 
nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self[:] = self._gt.wait() [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self._exit_event.wait() [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] result = hub.switch() [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 519.109358] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return self.greenlet.switch() [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] result = function(*args, **kwargs) [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] return func(*args, **kwargs) [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise e [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] nwinfo = self.network_api.allocate_for_instance( [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] created_port_ids = self._update_ports_for_instance( [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] with excutils.save_and_reraise_exception(): [ 519.109782] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] self.force_reraise() [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise self.value [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] updated_port = self._update_port( [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] _ensure_no_port_binding_failure(port) [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] raise exception.PortBindingFailed(port_id=port['id']) [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] nova.exception.PortBindingFailed: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. [ 519.110179] env[61648]: ERROR nova.compute.manager [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] [ 519.110503] env[61648]: DEBUG nova.compute.utils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 519.110767] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.655s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.112763] env[61648]: INFO nova.compute.claims [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.123590] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Build of instance c741026a-6cd4-49c8-8604-f67cf7189c8a was re-scheduled: Binding failed for port 39a70d53-105a-4064-b125-3f7c619ed1ca, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 519.125188] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 519.125188] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquiring lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.125188] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Acquired lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.125188] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 519.237524] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 519.252723] env[61648]: DEBUG nova.network.neutron [-] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.278411] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 519.278622] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 519.278772] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 519.279197] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 519.279713] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 519.279713] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 519.279949] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 519.279949] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 
tempest-MigrationsAdminTest-286825448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 519.280070] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 519.280654] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 519.280654] env[61648]: DEBUG nova.virt.hardware [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 519.283471] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37325788-7e83-454b-b59d-30a02664ce0d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.301021] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4a7a5c-50c4-4802-a928-3ba8310186d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.462789] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.543172] env[61648]: DEBUG nova.compute.manager [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Received event network-vif-deleted-ebbbae79-2dd1-42c0-9f07-56cf7947d131 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.543172] env[61648]: DEBUG nova.compute.manager [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Received event network-changed-d02883f7-01b1-42b3-baf7-99d9d0e95ede {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.543172] env[61648]: DEBUG nova.compute.manager [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Refreshing instance network info cache due to event network-changed-d02883f7-01b1-42b3-baf7-99d9d0e95ede. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 519.543172] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] Acquiring lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.544435] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] Acquired lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.544624] env[61648]: DEBUG nova.network.neutron [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Refreshing network info cache for port d02883f7-01b1-42b3-baf7-99d9d0e95ede {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 519.576280] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Successfully created port: 243befa8-9485-409f-bc25-a981f5c4d0bc {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.734943] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.754910] env[61648]: INFO nova.compute.manager [-] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Took 1.09 seconds to deallocate network for instance. [ 519.757925] env[61648]: DEBUG nova.compute.claims [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 519.758671] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.846125] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. 
[ 519.846125] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 519.846125] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.846125] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.846125] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.846125] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.846125] env[61648]: ERROR nova.compute.manager raise self.value [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.846125] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 519.846125] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.846125] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 519.846629] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.846629] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 519.846629] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. 
[ 519.846629] env[61648]: ERROR nova.compute.manager [ 519.846629] env[61648]: Traceback (most recent call last): [ 519.846629] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 519.846629] env[61648]: listener.cb(fileno) [ 519.846629] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 519.846629] env[61648]: result = function(*args, **kwargs) [ 519.846629] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 519.846629] env[61648]: return func(*args, **kwargs) [ 519.846629] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 519.846629] env[61648]: raise e [ 519.846629] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 519.846629] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 519.846629] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.846629] env[61648]: created_port_ids = self._update_ports_for_instance( [ 519.846629] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.846629] env[61648]: with excutils.save_and_reraise_exception(): [ 519.846629] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.846629] env[61648]: self.force_reraise() [ 519.846629] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.846629] env[61648]: raise self.value [ 519.846629] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.846629] env[61648]: updated_port = self._update_port( [ 519.846629] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.846629] env[61648]: _ensure_no_port_binding_failure(port) [ 519.846629] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.846629] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 519.848505] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. [ 519.848505] env[61648]: Removing descriptor: 16 [ 519.848505] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. 
[ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Traceback (most recent call last): [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] yield resources [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.driver.spawn(context, instance, image_meta, [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 519.848505] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] vm_ref = self.build_virtual_machine(instance, [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] vif_infos = vmwarevif.get_vif_info(self._session, [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] for vif in network_info: [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self._sync_wrapper(fn, *args, **kwargs) [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.wait() [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self[:] = self._gt.wait() [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self._exit_event.wait() [ 519.848841] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 519.849184] env[61648]: ERROR 
nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] result = hub.switch() [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self.greenlet.switch() [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] result = function(*args, **kwargs) [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return func(*args, **kwargs) [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise e [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] nwinfo = self.network_api.allocate_for_instance( [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 519.849184] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] created_port_ids = self._update_ports_for_instance( [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] with excutils.save_and_reraise_exception(): [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.force_reraise() [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise self.value [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] updated_port = self._update_port( [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 519.849525] 
env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] _ensure_no_port_binding_failure(port) [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 519.849525] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise exception.PortBindingFailed(port_id=port['id']) [ 519.849845] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. [ 519.849845] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] [ 519.849845] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Terminating instance [ 519.851299] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.851299] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.851299] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 520.080322] env[61648]: DEBUG nova.network.neutron [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.252351] env[61648]: DEBUG nova.network.neutron [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.347038] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.393430] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6c0ce5-0314-4487-8af2-5340e237f244 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.402955] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5f2bbb-6d51-4b9a-92af-1a9b5c111116 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.446341] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.449503] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b004953-6cb3-43bb-a76e-3daad94d3c20 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.457447] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a207ead-ec8d-45b8-945f-9a0ecfeb40a5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.475654] env[61648]: DEBUG nova.compute.provider_tree [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.755862] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c939bb8-672d-460c-a5ae-fe318377b8e4 req-0179f792-f8cb-4866-a9f2-2a9848b69410 service nova] Releasing lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.771618] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.852753] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 
tempest-ServerDiagnosticsTest-268114726-project-member] Releasing lock "refresh_cache-c741026a-6cd4-49c8-8604-f67cf7189c8a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.852980] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 520.853159] env[61648]: DEBUG nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 520.853325] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 520.908285] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.978537] env[61648]: DEBUG nova.scheduler.client.report [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 521.274493] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 521.274691] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 521.274915] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 521.275207] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79097a7a-786c-453c-a3b0-aa9b4bfc2cb9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.289932] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b0c556-0a12-4b70-a5d9-c92898ab8f7d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.312969] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77ce1c04-88c1-4df5-9436-4f1878217c4a could not be found. [ 521.313221] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 521.313403] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 521.313644] env[61648]: DEBUG oslo.service.loopingcall [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 521.313922] env[61648]: DEBUG nova.compute.manager [-] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 521.313990] env[61648]: DEBUG nova.network.neutron [-] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 521.367694] env[61648]: DEBUG nova.network.neutron [-] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 521.412911] env[61648]: DEBUG nova.network.neutron [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.484942] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.486738] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.489827] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.723s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.775352] env[61648]: ERROR nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. 
[ 521.775352] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.775352] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.775352] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.775352] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.775352] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.775352] env[61648]: ERROR nova.compute.manager raise self.value [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.775352] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.775352] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.775352] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.776198] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.776198] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.776198] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. 
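Per the frames cited in this traceback (manager.py:1989 calling allocate_for_instance and manager.py:2011 re-raising), the async network-allocation wrapper logs "Instance failed network setup after N attempt(s)" and then re-raises once its attempts are exhausted, which is exactly the pairing of log lines seen here. The sketch below is a rough, hedged illustration of that retry-and-reraise shape; the attempt counting, logging calls and signature are assumptions and differ from the real nova.compute.manager code.

```python
# Hedged sketch of the retry/re-raise pattern implied by the traceback:
# allocate_for_instance is attempted, and on the final failure the error is
# logged ("failed network setup after N attempt(s)") and re-raised.
import logging

LOG = logging.getLogger(__name__)


def allocate_network_async(network_api, context, instance, attempts=1):
    for attempt in range(1, attempts + 1):
        try:
            # corresponds to manager.py:1989 in the traceback
            return network_api.allocate_for_instance(context, instance)
        except Exception as e:
            if attempt == attempts:
                LOG.error("Instance failed network setup after %d attempt(s): %s",
                          attempt, e)
                # corresponds to manager.py:2011 in the traceback
                raise e
            LOG.warning("Network setup attempt %d failed, retrying: %s",
                        attempt, e)
```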
[ 521.776198] env[61648]: ERROR nova.compute.manager [ 521.776198] env[61648]: Traceback (most recent call last): [ 521.776198] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.776198] env[61648]: listener.cb(fileno) [ 521.776198] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.776198] env[61648]: result = function(*args, **kwargs) [ 521.776198] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.776198] env[61648]: return func(*args, **kwargs) [ 521.776198] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.776198] env[61648]: raise e [ 521.776198] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.776198] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 521.776198] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.776198] env[61648]: created_port_ids = self._update_ports_for_instance( [ 521.776198] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.776198] env[61648]: with excutils.save_and_reraise_exception(): [ 521.776198] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.776198] env[61648]: self.force_reraise() [ 521.776198] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.776198] env[61648]: raise self.value [ 521.776198] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.776198] env[61648]: updated_port = self._update_port( [ 521.776198] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.776198] env[61648]: _ensure_no_port_binding_failure(port) [ 521.776198] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.776198] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.777047] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. [ 521.777047] env[61648]: Removing descriptor: 19 [ 521.777047] env[61648]: ERROR nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. 
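The companion traceback that follows the raw one shows why the failure only surfaces while the VMware driver is building VIF info: network_info is an asynchronous wrapper, and iterating it (model.py __iter__ -> _sync_wrapper -> wait) blocks on the allocation greenthread and re-raises its exception inside driver.spawn(). The snippet below is a hedged illustration of that lazy-wrapper behaviour using plain threads instead of eventlet; the class and function names are illustrative, not Nova's.

```python
# Hedged illustration of the lazy network_info wrapper implied by the traceback.
# Nova uses eventlet greenthreads; a ThreadPoolExecutor is used here purely to
# show the shape: an exception raised during async allocation reappears at the
# moment the result is first consumed, i.e. while spawning the VM.
import concurrent.futures


class NetworkInfoAsyncWrapper:
    def __init__(self, fn, *args):
        self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        self._future = self._executor.submit(fn, *args)

    def __iter__(self):
        # The first consumer blocks until allocation finishes; any exception
        # from the background task is re-raised here.
        return iter(self._future.result())


def failing_allocation():
    raise RuntimeError("Binding failed for port ...")


network_info = NetworkInfoAsyncWrapper(failing_allocation)
try:
    for vif in network_info:   # mirrors get_vif_info() iterating network_info
        pass
except RuntimeError as exc:
    print("surfaced during spawn:", exc)
```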
[ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Traceback (most recent call last): [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] yield resources [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.driver.spawn(context, instance, image_meta, [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 521.777047] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] vm_ref = self.build_virtual_machine(instance, [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] vif_infos = vmwarevif.get_vif_info(self._session, [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] for vif in network_info: [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self._sync_wrapper(fn, *args, **kwargs) [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.wait() [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self[:] = self._gt.wait() [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self._exit_event.wait() [ 521.777388] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 521.777740] env[61648]: ERROR 
nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] result = hub.switch() [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self.greenlet.switch() [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] result = function(*args, **kwargs) [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return func(*args, **kwargs) [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise e [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] nwinfo = self.network_api.allocate_for_instance( [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.777740] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] created_port_ids = self._update_ports_for_instance( [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] with excutils.save_and_reraise_exception(): [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.force_reraise() [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise self.value [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] updated_port = self._update_port( [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.778095] 
env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] _ensure_no_port_binding_failure(port) [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.778095] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise exception.PortBindingFailed(port_id=port['id']) [ 521.778407] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. [ 521.778407] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] [ 521.778407] env[61648]: INFO nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Terminating instance [ 521.780249] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquiring lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.780472] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquired lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.780714] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 521.871142] env[61648]: DEBUG nova.network.neutron [-] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.916502] env[61648]: INFO nova.compute.manager [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] Took 1.06 seconds to deallocate network for instance. [ 521.946226] env[61648]: DEBUG nova.compute.manager [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Received event network-changed-12f09195-5216-432e-bc0f-276a8216a482 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 521.946226] env[61648]: DEBUG nova.compute.manager [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Refreshing instance network info cache due to event network-changed-12f09195-5216-432e-bc0f-276a8216a482. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 521.946226] env[61648]: DEBUG oslo_concurrency.lockutils [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] Acquiring lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.946226] env[61648]: DEBUG oslo_concurrency.lockutils [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] Acquired lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.947777] env[61648]: DEBUG nova.network.neutron [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Refreshing network info cache for port 12f09195-5216-432e-bc0f-276a8216a482 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 521.997422] env[61648]: DEBUG nova.compute.utils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 522.005733] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 522.005999] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 522.187469] env[61648]: DEBUG nova.policy [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92515d7ff6a34a059d9752c2f3875654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b6303f6290f484894a87066fffac256', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 522.203227] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e333f264-d6b4-44f6-982e-547d54bdb06b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.211376] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93a0b30-6ee7-473c-b177-1f3feaad4b9e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.241269] env[61648]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da205a5-b439-4f59-b61d-d1a25cb6a062 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.250658] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af0014c-2ce5-4a51-8d9c-243b07b1e70c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.263515] env[61648]: DEBUG nova.compute.provider_tree [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 522.335140] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.378066] env[61648]: INFO nova.compute.manager [-] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Took 1.06 seconds to deallocate network for instance. [ 522.380502] env[61648]: DEBUG nova.compute.claims [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 522.380759] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.491492] env[61648]: DEBUG nova.network.neutron [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.506447] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.588316] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "7cee9c25-69f4-4ceb-ba48-0cc246657fdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.588584] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "7cee9c25-69f4-4ceb-ba48-0cc246657fdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.767482] env[61648]: DEBUG nova.scheduler.client.report [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 522.796811] env[61648]: DEBUG nova.network.neutron [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.858298] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.959500] env[61648]: INFO nova.scheduler.client.report [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Deleted allocations for instance c741026a-6cd4-49c8-8604-f67cf7189c8a [ 523.277607] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.788s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.279241] env[61648]: ERROR nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Failed to build and run instance: nova.exception.PortBindingFailed: 
Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Traceback (most recent call last): [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.driver.spawn(context, instance, image_meta, [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] vm_ref = self.build_virtual_machine(instance, [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.279241] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] for vif in network_info: [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self._sync_wrapper(fn, *args, **kwargs) [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.wait() [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self[:] = self._gt.wait() [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self._exit_event.wait() [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] result = hub.switch() [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 
199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.279556] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return self.greenlet.switch() [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] result = function(*args, **kwargs) [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] return func(*args, **kwargs) [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise e [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] nwinfo = self.network_api.allocate_for_instance( [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] created_port_ids = self._update_ports_for_instance( [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] with excutils.save_and_reraise_exception(): [ 523.280145] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] self.force_reraise() [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise self.value [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] updated_port = self._update_port( [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] _ensure_no_port_binding_failure(port) [ 523.280492] env[61648]: ERROR 
nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] raise exception.PortBindingFailed(port_id=port['id']) [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] nova.exception.PortBindingFailed: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. [ 523.280492] env[61648]: ERROR nova.compute.manager [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] [ 523.280790] env[61648]: DEBUG nova.compute.utils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 523.280790] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.895s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.286089] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Build of instance 199ab8b8-15d8-47b3-8e72-d3995047cb45 was re-scheduled: Binding failed for port f2f1a48b-60e9-4fb2-a9aa-dd441ddfc59c, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 523.286089] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 523.286089] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquiring lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.286089] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Acquired lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.286258] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 523.301619] env[61648]: DEBUG oslo_concurrency.lockutils [req-0260af55-3c3a-4de5-b74a-6f8168eb17bb req-b2fb4a6a-ed8e-462d-80ac-87a00d172b3b service nova] Releasing lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.361369] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Releasing lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.361793] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 523.363094] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 523.363094] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f6c4737-7b65-47e6-b051-26d9d398855a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.374646] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3556ec2c-7641-4f8a-98f3-179179401f57 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.404981] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 368c44e8-756c-4b11-8a63-9f69e007769c could not be found. [ 523.404981] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 523.404981] env[61648]: INFO nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 523.404981] env[61648]: DEBUG oslo.service.loopingcall [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
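Annotation: the destroy path above tolerates the backend having no VM at all. FindAllByUuid comes back empty, vmops logs "Instance does not exist on backend" as a WARNING, and the flow still reports "Instance destroyed". A hedged sketch of that tolerance pattern, using a stand-in exception class and hypothetical backend calls rather than the vmwareapi driver's real API:

```python
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(backend, instance_uuid):
    # A missing VM on the hypervisor is not an error for destroy: log it
    # and treat the instance as already gone, as the WARNING above does.
    try:
        vm_ref = backend.find_vm_by_uuid(instance_uuid)  # hypothetical call
        backend.destroy_vm(vm_ref)                       # hypothetical call
    except InstanceNotFound as exc:
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")
```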
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.404981] env[61648]: DEBUG nova.compute.manager [-] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 523.404981] env[61648]: DEBUG nova.network.neutron [-] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 523.472636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32b9adb2-dfc2-47ef-bd58-b3c00909214c tempest-ServerDiagnosticsTest-268114726 tempest-ServerDiagnosticsTest-268114726-project-member] Lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.867s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.475221] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.473s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.475594] env[61648]: INFO nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c741026a-6cd4-49c8-8604-f67cf7189c8a] During sync_power_state the instance has a pending task (spawning). Skip. [ 523.475594] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "c741026a-6cd4-49c8-8604-f67cf7189c8a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.481740] env[61648]: DEBUG nova.network.neutron [-] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.516507] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "f03f349e-d5ed-437b-8b13-6b036f2b88dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.516729] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "f03f349e-d5ed-437b-8b13-6b036f2b88dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.517744] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Start spawning the instance on the hypervisor. 
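Annotation: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" records throughout this run are emitted by oslo.concurrency. A minimal sketch of the two usual entry points that produce those lines; the lock names are taken from the log, the function bodies are placeholders:

```python
from oslo_concurrency import lockutils

# Decorator form: serialises every call on the named lock and logs how
# long each caller waited and how long the lock was held.
@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance):
    ...  # placeholder body

# Context-manager form, e.g. the per-instance refresh_cache-<uuid> locks.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        ...  # placeholder body
```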
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.553674] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.553674] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.553674] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.553946] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.553946] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.553946] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.557885] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.557885] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.558137] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.558676] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.558676] env[61648]: DEBUG nova.virt.hardware [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.559962] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c075399-5e42-4ce0-972e-9b3e32085a5e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.570687] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d3430a-ece8-49c3-a31b-c5d39ced7564 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.812446] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquiring lock "3c252464-1b1a-4c1a-86bb-2fb0107aa52f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.812446] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "3c252464-1b1a-4c1a-86bb-2fb0107aa52f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.858434] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.976312] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Starting instance... 
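Annotation: the nova.virt.hardware records above walk from flavor/image limits (0 meaning "no preference", capped at 65536) down to a single desirable topology for 1 vCPU. The snippet below is not Nova's implementation, only a small sketch of why the enumeration collapses to (sockets=1, cores=1, threads=1) here:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```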
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 523.984674] env[61648]: DEBUG nova.network.neutron [-] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.014315] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Successfully created port: a2ceba79-7d63-4f21-bfa4-ac44048c6d20 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 524.087454] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e5c710-42ba-44a0-bf03-92fa4facf662 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.096623] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6c4d71-7b23-497d-9e44-37cacc1859ed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.138983] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad84a375-1838-4470-9326-2a4dde8d203a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.146979] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96742037-a02c-40a7-936c-65745ff1924c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.163653] env[61648]: DEBUG nova.compute.provider_tree [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.191894] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.264887] env[61648]: ERROR nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. 
[ 524.264887] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.264887] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.264887] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.264887] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.264887] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.264887] env[61648]: ERROR nova.compute.manager raise self.value [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.264887] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 524.264887] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.264887] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 524.265368] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.265368] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 524.265368] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. 
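Annotation: each of these tracebacks passes through oslo_utils.excutils.save_and_reraise_exception() and its force_reraise(), which is where the "raise self.value" frames come from. The context manager exists to run cleanup while preserving the original exception. A minimal, self-contained usage sketch; update_port and rollback are placeholders, not Nova's helpers:

```python
from oslo_utils import excutils

def rollback(created):
    print("rolling back", created)

def update_port(port):
    raise RuntimeError("binding failed for %s" % port)

def update_ports(ports):
    created = []
    for port in ports:
        try:
            created.append(update_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; when the with-block exits,
                # force_reraise() re-raises the original exception,
                # matching the frames in the tracebacks above.
                rollback(created)

try:
    update_ports(['f2f1a48b'])
except RuntimeError as exc:
    print("re-raised:", exc)
```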
[ 524.265368] env[61648]: ERROR nova.compute.manager [ 524.265368] env[61648]: Traceback (most recent call last): [ 524.265368] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 524.265368] env[61648]: listener.cb(fileno) [ 524.265368] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 524.265368] env[61648]: result = function(*args, **kwargs) [ 524.265368] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 524.265368] env[61648]: return func(*args, **kwargs) [ 524.265368] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 524.265368] env[61648]: raise e [ 524.265368] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.265368] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 524.265368] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.265368] env[61648]: created_port_ids = self._update_ports_for_instance( [ 524.265368] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.265368] env[61648]: with excutils.save_and_reraise_exception(): [ 524.265368] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.265368] env[61648]: self.force_reraise() [ 524.265368] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.265368] env[61648]: raise self.value [ 524.265368] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.265368] env[61648]: updated_port = self._update_port( [ 524.265368] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.265368] env[61648]: _ensure_no_port_binding_failure(port) [ 524.265368] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.265368] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 524.266951] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. [ 524.266951] env[61648]: Removing descriptor: 18 [ 524.266951] env[61648]: ERROR nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. 
[ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Traceback (most recent call last): [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] yield resources [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.driver.spawn(context, instance, image_meta, [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 524.266951] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] vm_ref = self.build_virtual_machine(instance, [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] for vif in network_info: [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self._sync_wrapper(fn, *args, **kwargs) [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.wait() [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self[:] = self._gt.wait() [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self._exit_event.wait() [ 524.267426] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 524.267793] env[61648]: ERROR 
nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] result = hub.switch() [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self.greenlet.switch() [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] result = function(*args, **kwargs) [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return func(*args, **kwargs) [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise e [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] nwinfo = self.network_api.allocate_for_instance( [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 524.267793] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] created_port_ids = self._update_ports_for_instance( [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] with excutils.save_and_reraise_exception(): [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.force_reraise() [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise self.value [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] updated_port = self._update_port( [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 524.268149] 
env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] _ensure_no_port_binding_failure(port) [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 524.268149] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise exception.PortBindingFailed(port_id=port['id']) [ 524.268479] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. [ 524.268479] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] [ 524.268479] env[61648]: INFO nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Terminating instance [ 524.269633] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.269633] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquired lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.269633] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 524.298994] env[61648]: DEBUG nova.compute.manager [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Received event network-vif-deleted-d02883f7-01b1-42b3-baf7-99d9d0e95ede {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 524.298994] env[61648]: DEBUG nova.compute.manager [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Received event network-changed-00d65201-eb20-4b59-ad50-d22d8c516380 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 524.298994] env[61648]: DEBUG nova.compute.manager [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Refreshing instance network info cache due to event network-changed-00d65201-eb20-4b59-ad50-d22d8c516380. 
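Annotation: one detail worth noting in the spawn traceback above is that PortBindingFailed is raised on the green thread doing network allocation, but only surfaces when get_vif_info() iterates network_info, because the VIF list is an async wrapper that waits for the allocation thread on first use. A rough, hypothetical sketch of that pattern with eventlet (not Nova's NetworkInfoAsyncWrapper):

```python
import eventlet

class AsyncNetworkInfo:
    """Hypothetical stand-in: allocation runs on a green thread and is
    only awaited when the VIF list is first iterated."""

    def __init__(self, allocate_fn, *args):
        self._gt = eventlet.spawn(allocate_fn, *args)
        self._vifs = None

    def __iter__(self):
        if self._vifs is None:
            # Any allocation failure (e.g. a port binding error) is
            # re-raised here, inside whoever iterates the VIFs -- which
            # is why the spawn traceback runs through __iter__ and wait().
            self._vifs = self._gt.wait()
        return iter(self._vifs)
```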
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 524.298994] env[61648]: DEBUG oslo_concurrency.lockutils [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] Acquiring lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.298994] env[61648]: DEBUG oslo_concurrency.lockutils [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] Acquired lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.299556] env[61648]: DEBUG nova.network.neutron [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Refreshing network info cache for port 00d65201-eb20-4b59-ad50-d22d8c516380 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 524.488779] env[61648]: INFO nova.compute.manager [-] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Took 1.08 seconds to deallocate network for instance. [ 524.491322] env[61648]: DEBUG nova.compute.claims [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 524.491481] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.502942] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.670020] env[61648]: DEBUG nova.scheduler.client.report [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 524.698416] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Releasing lock "refresh_cache-199ab8b8-15d8-47b3-8e72-d3995047cb45" {{(pid=61648) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.700091] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 524.700091] env[61648]: DEBUG nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 524.700091] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 524.863101] env[61648]: DEBUG nova.network.neutron [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.928829] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.980128] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquiring lock "b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.980128] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.999047] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance cache missing network info. 
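Annotation: the "Virt driver does not provide unplug_vifs method" line above reflects cleanup probing for an optional driver capability and carrying on when it is missing. A minimal sketch of that pattern with generic names, not the compute manager's actual code:

```python
import logging

LOG = logging.getLogger(__name__)

def cleanup_allocated_networks(driver, instance, network_info):
    try:
        driver.unplug_vifs(instance, network_info)
    except NotImplementedError:
        # Optional capability: some drivers simply do not implement it,
        # which is what the DEBUG message above records.
        LOG.debug("Virt driver does not provide unplug_vifs method, so it "
                  "is not possible determine if VIFs should be unplugged.")
    # Network deallocation continues either way.
```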
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.181970] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.901s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.182636] env[61648]: ERROR nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Traceback (most recent call last): [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.driver.spawn(context, instance, image_meta, [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] vm_ref = self.build_virtual_machine(instance, [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 525.182636] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] for vif in network_info: [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self._sync_wrapper(fn, *args, **kwargs) [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.wait() [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 525.182992] env[61648]: ERROR 
nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self[:] = self._gt.wait() [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self._exit_event.wait() [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] result = hub.switch() [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 525.182992] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return self.greenlet.switch() [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] result = function(*args, **kwargs) [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] return func(*args, **kwargs) [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise e [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] nwinfo = self.network_api.allocate_for_instance( [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] created_port_ids = self._update_ports_for_instance( [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] with excutils.save_and_reraise_exception(): [ 525.183343] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] self.force_reraise() [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise self.value [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] updated_port = self._update_port( [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] _ensure_no_port_binding_failure(port) [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] raise exception.PortBindingFailed(port_id=port['id']) [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] nova.exception.PortBindingFailed: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. [ 525.183658] env[61648]: ERROR nova.compute.manager [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] [ 525.183932] env[61648]: DEBUG nova.compute.utils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 525.184598] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.793s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.184773] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.184920] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 525.188319] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.154s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.191334] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Build of instance c72ac06b-b114-4c5e-af9f-fd7dfc880a34 was re-scheduled: Binding failed for port 6f7c5f96-3bb5-4285-bc03-5d1512385cfa, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 525.191811] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 525.192049] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquiring lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.192198] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Acquired lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.192353] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 525.193853] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37452520-4e68-49a9-bff1-456945e170ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.204023] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dfab73-8d63-40c7-9e3b-6b75eb8febed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.218531] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26664af1-fd42-4a09-bde2-8fe5c0239a82 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.225761] env[61648]: DEBUG nova.network.neutron [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.227568] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20984d0a-f827-4d96-8fb0-cc853a0c3c8a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.275306] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181452MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 525.275841] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.330297] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.437580] env[61648]: DEBUG nova.network.neutron [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.732869] env[61648]: DEBUG oslo_concurrency.lockutils [req-63585d50-91a8-4b75-a788-0f078542c107 req-61028b15-1bb6-49f7-948e-96e2448c2e2a service nova] Releasing lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.763211] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.835874] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Releasing lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.835874] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 525.835874] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 525.835874] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0afe23fd-b98a-402f-9cfd-ae6e30c3580b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.854451] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a484120-8732-48b9-afee-e071f303e1d1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.887560] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 68e5fc5b-a843-4f49-a903-4ed145d63fd7 could not be found. [ 525.887815] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 525.887987] env[61648]: INFO nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 525.888259] env[61648]: DEBUG oslo.service.loopingcall [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 525.888474] env[61648]: DEBUG nova.compute.manager [-] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 525.888562] env[61648]: DEBUG nova.network.neutron [-] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 525.944574] env[61648]: INFO nova.compute.manager [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] Took 1.24 seconds to deallocate network for instance. 
[ 526.040568] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "d186a201-4ef8-40a6-9625-5e8ae99af4d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.040568] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "d186a201-4ef8-40a6-9625-5e8ae99af4d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.093161] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.105017] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e994992f-d3b6-4698-89b2-285ce65c6c5a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.114774] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723754b6-8b8f-439c-a181-342afd26a229 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.149157] env[61648]: DEBUG nova.network.neutron [-] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 526.150559] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5042f26f-98ea-4e00-85a1-c8cc821aa255 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.159511] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a9d162-9214-43d1-93fa-585dcef1c07d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.174478] env[61648]: DEBUG nova.compute.provider_tree [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.331334] env[61648]: DEBUG nova.compute.manager [req-d57f8832-f99c-4f2a-8841-30143a87d975 req-471526cd-6195-4cf8-8629-208f9dd34776 service nova] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Received event network-vif-deleted-12f09195-5216-432e-bc0f-276a8216a482 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 526.592160] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquiring lock "c2dadf90-2469-4df6-bcc4-dd65d8a748bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.592383] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "c2dadf90-2469-4df6-bcc4-dd65d8a748bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.597343] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Releasing lock "refresh_cache-c72ac06b-b114-4c5e-af9f-fd7dfc880a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.597552] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 526.598282] env[61648]: DEBUG nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 526.598282] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 526.617176] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 526.654855] env[61648]: DEBUG nova.network.neutron [-] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.678974] env[61648]: DEBUG nova.scheduler.client.report [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 526.987751] env[61648]: INFO nova.scheduler.client.report [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Deleted allocations for instance 199ab8b8-15d8-47b3-8e72-d3995047cb45 [ 527.122816] env[61648]: DEBUG nova.network.neutron [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.158260] env[61648]: INFO nova.compute.manager [-] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Took 1.27 seconds to deallocate network for instance. 
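Editor's note: the "Acquiring lock ... by ...", "Lock ... acquired by ... waited Ns" and "released ... held Ns" DEBUG records throughout this section come from oslo.concurrency. A minimal sketch of the two usage patterns visible in the log, a named shared lock ("compute_resources") and a per-instance lock keyed on the instance UUID; the function names here are hypothetical.

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Serialized with every other callable synchronized on the same
        # name, e.g. the resource tracker's instance_claim and
        # abort_instance_claim seen in the log.
        print('claiming resources for %s' % instance_uuid)


    def build_instance(instance_uuid):
        # Per-instance lock, mirroring the UUID-named
        # "_locked_do_build_and_run_instance" locks above.
        with lockutils.lock(instance_uuid):
            claim_resources(instance_uuid)

Every acquire/release of these locks is logged with the waited/held durations, which is what makes the lock contention (e.g. "waited 7.725s" on "compute_resources") visible in this trace.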
[ 527.163360] env[61648]: DEBUG nova.compute.claims [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 527.163560] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.184473] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.185109] env[61648]: ERROR nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Traceback (most recent call last): [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.driver.spawn(context, instance, image_meta, [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self._vmops.spawn(context, instance, image_meta, injected_files, [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] vm_ref = self.build_virtual_machine(instance, [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] vif_infos = vmwarevif.get_vif_info(self._session, [ 527.185109] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] for vif in network_info: [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 612, 
in __iter__ [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self._sync_wrapper(fn, *args, **kwargs) [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.wait() [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self[:] = self._gt.wait() [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self._exit_event.wait() [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] result = hub.switch() [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 527.185962] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return self.greenlet.switch() [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] result = function(*args, **kwargs) [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] return func(*args, **kwargs) [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise e [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] nwinfo = self.network_api.allocate_for_instance( [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] created_port_ids = self._update_ports_for_instance( [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 527.186604] 
env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] with excutils.save_and_reraise_exception(): [ 527.186604] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] self.force_reraise() [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise self.value [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] updated_port = self._update_port( [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] _ensure_no_port_binding_failure(port) [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] raise exception.PortBindingFailed(port_id=port['id']) [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] nova.exception.PortBindingFailed: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. [ 527.187598] env[61648]: ERROR nova.compute.manager [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] [ 527.188128] env[61648]: DEBUG nova.compute.utils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 527.188128] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.725s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.188749] env[61648]: INFO nova.compute.claims [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 527.194629] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Build of instance 92d3ea1d-8a72-4f4b-87be-70367170d933 was re-scheduled: Binding failed for port ebbbae79-2dd1-42c0-9f07-56cf7947d131, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 527.195093] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 527.195357] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.195522] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquired lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.195632] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 527.429226] env[61648]: DEBUG nova.compute.manager [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Received event network-vif-deleted-00d65201-eb20-4b59-ad50-d22d8c516380 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 527.429500] env[61648]: DEBUG nova.compute.manager [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Received event network-changed-243befa8-9485-409f-bc25-a981f5c4d0bc {{(pid=61648) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 527.431072] env[61648]: DEBUG nova.compute.manager [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Refreshing instance network info cache due to event network-changed-243befa8-9485-409f-bc25-a981f5c4d0bc. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 527.431072] env[61648]: DEBUG oslo_concurrency.lockutils [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] Acquiring lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.431072] env[61648]: DEBUG oslo_concurrency.lockutils [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] Acquired lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.431072] env[61648]: DEBUG nova.network.neutron [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Refreshing network info cache for port 243befa8-9485-409f-bc25-a981f5c4d0bc {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 527.503084] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bfa72a37-f8ae-4b36-ade8-4aab626110d6 tempest-TenantUsagesTestJSON-967193765 tempest-TenantUsagesTestJSON-967193765-project-member] Lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.958s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.503084] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 21.500s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.503084] env[61648]: INFO nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 199ab8b8-15d8-47b3-8e72-d3995047cb45] During sync_power_state the instance has a pending task (spawning). Skip. [ 527.503084] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "199ab8b8-15d8-47b3-8e72-d3995047cb45" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.624344] env[61648]: INFO nova.compute.manager [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] Took 1.03 seconds to deallocate network for instance. 
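Editor's note: the PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which inspects the port returned by Neutron and raises when the binding failed, after which the build is re-scheduled. The following is a simplified, self-contained stand-in for that check, not a copy of nova's module; the example port dict uses the port ID from the log, and its other values are assumed.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding by setting binding:vif_type to
        # "binding_failed"; any other value is treated as a usable binding.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Example: a port dict as returned by the Neutron API (values assumed).
    ensure_no_port_binding_failure(
        {'id': 'ebbbae79-2dd1-42c0-9f07-56cf7947d131',
         'binding:vif_type': 'normal'})

When the check raises, the compute manager aborts the resource claim and marks the instance for re-scheduling, which is exactly the "Build of instance ... was re-scheduled: Binding failed for port ..." record seen above.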
[ 527.754139] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.006495] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 528.016288] env[61648]: DEBUG nova.network.neutron [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.223156] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.372140] env[61648]: DEBUG nova.network.neutron [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.545488] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.581175] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c347271c-3a31-48df-aa26-cf21c14ce595 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.589169] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55fd46b-fa80-4072-b2f2-cb2917ff0b51 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.638501] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd3881e-afd3-41db-b435-c9b8ef09b7af {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.646527] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb789d3a-2db4-49e8-897a-f79b5e0cc3ca {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.665928] env[61648]: DEBUG nova.compute.provider_tree [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in 
ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.680275] env[61648]: INFO nova.scheduler.client.report [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Deleted allocations for instance c72ac06b-b114-4c5e-af9f-fd7dfc880a34 [ 528.727098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Releasing lock "refresh_cache-92d3ea1d-8a72-4f4b-87be-70367170d933" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.727098] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 528.727098] env[61648]: DEBUG nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.727098] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 528.795215] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.875621] env[61648]: DEBUG oslo_concurrency.lockutils [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] Releasing lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.875911] env[61648]: DEBUG nova.compute.manager [req-608220a2-dd0c-4e6a-81fc-2e1e85818927 req-fdadc29d-968f-400a-b00a-16e1d551a720 service nova] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Received event network-vif-deleted-243befa8-9485-409f-bc25-a981f5c4d0bc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 528.918695] env[61648]: ERROR nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. 
[ 528.918695] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.918695] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.918695] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.918695] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.918695] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.918695] env[61648]: ERROR nova.compute.manager raise self.value [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.918695] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 528.918695] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.918695] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 528.919183] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.919183] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 528.919183] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. 
[ 528.919183] env[61648]: ERROR nova.compute.manager [ 528.919183] env[61648]: Traceback (most recent call last): [ 528.919183] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 528.919183] env[61648]: listener.cb(fileno) [ 528.919183] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.919183] env[61648]: result = function(*args, **kwargs) [ 528.919183] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.919183] env[61648]: return func(*args, **kwargs) [ 528.919183] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 528.919183] env[61648]: raise e [ 528.919183] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.919183] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 528.919183] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.919183] env[61648]: created_port_ids = self._update_ports_for_instance( [ 528.919183] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.919183] env[61648]: with excutils.save_and_reraise_exception(): [ 528.919183] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.919183] env[61648]: self.force_reraise() [ 528.919183] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.919183] env[61648]: raise self.value [ 528.919183] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.919183] env[61648]: updated_port = self._update_port( [ 528.919183] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.919183] env[61648]: _ensure_no_port_binding_failure(port) [ 528.919183] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.919183] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 528.919896] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. [ 528.919896] env[61648]: Removing descriptor: 19 [ 528.919896] env[61648]: ERROR nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. 
[ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Traceback (most recent call last): [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] yield resources [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.driver.spawn(context, instance, image_meta, [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.919896] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] vm_ref = self.build_virtual_machine(instance, [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] for vif in network_info: [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self._sync_wrapper(fn, *args, **kwargs) [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.wait() [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self[:] = self._gt.wait() [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self._exit_event.wait() [ 528.920332] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.920729] env[61648]: ERROR 
nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] result = hub.switch() [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self.greenlet.switch() [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] result = function(*args, **kwargs) [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return func(*args, **kwargs) [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise e [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] nwinfo = self.network_api.allocate_for_instance( [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.920729] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] created_port_ids = self._update_ports_for_instance( [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] with excutils.save_and_reraise_exception(): [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.force_reraise() [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise self.value [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] updated_port = self._update_port( [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.921080] 
env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] _ensure_no_port_binding_failure(port) [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.921080] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise exception.PortBindingFailed(port_id=port['id']) [ 528.921487] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. [ 528.921487] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] [ 528.921487] env[61648]: INFO nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Terminating instance [ 528.925940] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquiring lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.926283] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquired lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.926283] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 529.173023] env[61648]: DEBUG nova.scheduler.client.report [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 529.190399] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d866751d-9ff3-405f-a74a-79f931628ce1 tempest-ServerExternalEventsTest-1211924591 tempest-ServerExternalEventsTest-1211924591-project-member] Lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.727s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.194716] env[61648]: 
DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 23.188s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.195043] env[61648]: INFO nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c72ac06b-b114-4c5e-af9f-fd7dfc880a34] During sync_power_state the instance has a pending task (block_device_mapping). Skip. [ 529.195261] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "c72ac06b-b114-4c5e-af9f-fd7dfc880a34" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.005s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.301154] env[61648]: DEBUG nova.network.neutron [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.468077] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.601081] env[61648]: DEBUG nova.compute.manager [None req-51865b38-efb3-45fb-abec-981b4db8b008 tempest-ServerDiagnosticsV248Test-503453851 tempest-ServerDiagnosticsV248Test-503453851-project-admin] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 529.601772] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc865df-44b2-4264-b367-9d9e1e416ea9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.610565] env[61648]: INFO nova.compute.manager [None req-51865b38-efb3-45fb-abec-981b4db8b008 tempest-ServerDiagnosticsV248Test-503453851 tempest-ServerDiagnosticsV248Test-503453851-project-admin] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Retrieving diagnostics [ 529.611572] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15a3a0a-d7ac-4372-9e8e-ac9d68ee23c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.661127] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.680984] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 
tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.681510] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 529.684168] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.926s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.698474] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.805042] env[61648]: INFO nova.compute.manager [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] Took 1.08 seconds to deallocate network for instance. [ 530.166792] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Releasing lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.167428] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.168144] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 530.168574] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fdcb430-95af-4649-9c5b-9db60066fe1a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.181402] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1ec741-3e1f-4eaf-9d5a-3032fa139e45 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.194188] env[61648]: DEBUG nova.compute.utils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 530.199705] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 530.199705] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 530.217818] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a03a74b-7fad-4338-ae6f-82c493cd44e3 could not be found. [ 530.217818] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 530.217818] env[61648]: INFO nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 530.217818] env[61648]: DEBUG oslo.service.loopingcall [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.218638] env[61648]: DEBUG nova.compute.manager [-] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.218949] env[61648]: DEBUG nova.network.neutron [-] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.237745] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.285549] env[61648]: DEBUG nova.network.neutron [-] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.321224] env[61648]: DEBUG nova.policy [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30b82480a0304b0aa24d641b7758d4ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8beeea85c1074df2a9eac7339fdad0cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 530.495430] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f8fb28-3bfb-46fc-87aa-511f105af93c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.512440] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23501a89-8b44-49d0-84a4-32508012c8ed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.550471] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb427a93-226e-451e-8fc4-02aefe0cb55c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.560552] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69288d19-e326-48d8-b51d-afa5116919c8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.576787] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.699719] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa 
tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 530.788512] env[61648]: DEBUG nova.network.neutron [-] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.859250] env[61648]: INFO nova.scheduler.client.report [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Deleted allocations for instance 92d3ea1d-8a72-4f4b-87be-70367170d933 [ 531.080591] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.293391] env[61648]: INFO nova.compute.manager [-] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Took 1.07 seconds to deallocate network for instance. [ 531.295457] env[61648]: DEBUG nova.compute.claims [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 531.295457] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.370535] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2e171e04-88f5-4cde-87a0-7ff636219dbd tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "92d3ea1d-8a72-4f4b-87be-70367170d933" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.664s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.372163] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "92d3ea1d-8a72-4f4b-87be-70367170d933" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.369s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.372163] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a09cad8f-797e-45a5-b295-c4a64ee6f659 
{{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.386290] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fd5fb4-7ef4-4453-9bce-0fb1ab427bf9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.403021] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Successfully created port: 10601d2a-d3de-4fbc-ba87-2c5d7c6816df {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.594178] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.594178] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Traceback (most recent call last): [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.driver.spawn(context, instance, image_meta, [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 531.594178] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] vm_ref = self.build_virtual_machine(instance, [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] vif_infos = vmwarevif.get_vif_info(self._session, [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] for vif in network_info: [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self._sync_wrapper(fn, *args, **kwargs) [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.wait() [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self[:] = self._gt.wait() [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self._exit_event.wait() [ 531.595850] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] result = hub.switch() [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return self.greenlet.switch() [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] result = function(*args, **kwargs) [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] return func(*args, **kwargs) [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise e [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] nwinfo = self.network_api.allocate_for_instance( [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 531.596207] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] created_port_ids = self._update_ports_for_instance( [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] with excutils.save_and_reraise_exception(): [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] self.force_reraise() [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise self.value [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] updated_port = self._update_port( [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] _ensure_no_port_binding_failure(port) [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 531.596552] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] raise exception.PortBindingFailed(port_id=port['id']) [ 531.596926] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] nova.exception.PortBindingFailed: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. [ 531.596926] env[61648]: ERROR nova.compute.manager [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] [ 531.596926] env[61648]: DEBUG nova.compute.utils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 531.597805] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Build of instance 4deb5eaa-e8e2-41cc-aef9-722235e69b95 was re-scheduled: Binding failed for port d02883f7-01b1-42b3-baf7-99d9d0e95ede, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 531.598289] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 531.598545] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.599822] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.599822] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 531.602825] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.220s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.638220] env[61648]: DEBUG nova.compute.manager [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Received event network-changed-a2ceba79-7d63-4f21-bfa4-ac44048c6d20 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 531.638411] env[61648]: DEBUG nova.compute.manager [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Refreshing instance network info cache due to event network-changed-a2ceba79-7d63-4f21-bfa4-ac44048c6d20. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 531.640882] env[61648]: DEBUG oslo_concurrency.lockutils [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] Acquiring lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.640882] env[61648]: DEBUG oslo_concurrency.lockutils [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] Acquired lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.640882] env[61648]: DEBUG nova.network.neutron [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Refreshing network info cache for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 531.717244] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 531.754159] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.755919] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.756492] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.756895] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.757252] env[61648]: DEBUG 
nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.757603] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.757983] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.758275] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.759032] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.759390] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.759712] env[61648]: DEBUG nova.virt.hardware [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.761746] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5af43b-0b6c-476c-8c99-5e286d51da0f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.770149] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a2eb69-b9b6-473a-936d-8d08e5c13309 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.881906] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.930254] env[61648]: INFO nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 92d3ea1d-8a72-4f4b-87be-70367170d933] During the sync_power process the instance has moved from host None to host cpu-1 [ 531.930567] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "92d3ea1d-8a72-4f4b-87be-70367170d933" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.096536] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "66562ea6-5d39-4b98-a9e2-0512295ab94f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.096536] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "66562ea6-5d39-4b98-a9e2-0512295ab94f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.180408] env[61648]: DEBUG nova.network.neutron [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.300389] env[61648]: DEBUG nova.network.neutron [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.323211] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3782c1-aec6-44eb-8d43-fe39a5e2b183 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.332189] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c25a34-cce1-4dbc-b587-e5596474b01a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.336206] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.373842] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34e8ec-6c26-404b-bdf7-a00cf6df6bb1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.382322] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077f616c-faae-44f6-a7d5-9c4e5eb51ee9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.400232] env[61648]: DEBUG nova.compute.provider_tree [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.415537] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.637160] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.804463] env[61648]: DEBUG oslo_concurrency.lockutils [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] Releasing lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.804625] env[61648]: DEBUG nova.compute.manager [req-c16e0b5e-5a12-4124-9e1c-3918b576a1e8 req-fddd5463-a7d9-42af-8d26-eed38061abe7 service nova] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Received event network-vif-deleted-a2ceba79-7d63-4f21-bfa4-ac44048c6d20 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 532.907629] env[61648]: DEBUG nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.141629] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-4deb5eaa-e8e2-41cc-aef9-722235e69b95" 
{{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.141629] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 533.141629] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 533.141629] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 533.177272] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.320723] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.321379] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.322871] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "767c2c81-2508-4dcd-97d7-28726c2c6d31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.323232] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.323517] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a 
tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.326992] env[61648]: INFO nova.compute.manager [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Terminating instance [ 533.328027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock "refresh_cache-767c2c81-2508-4dcd-97d7-28726c2c6d31" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.328027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquired lock "refresh_cache-767c2c81-2508-4dcd-97d7-28726c2c6d31" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.328027] env[61648]: DEBUG nova.network.neutron [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.420045] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.819s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.421018] env[61648]: ERROR nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. 
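The PortBindingFailed failures recorded here (instance 4deb5eaa-e8e2-41cc-aef9-722235e69b95 in the traceback above, and 77ce1c04-88c1-4df5-9436-4f1878217c4a in the error just logged) all terminate in the same frame: nova/network/neutron.py line 294, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']). A minimal standalone sketch of that check follows; the guard on Neutron's binding:vif_type attribute is an assumption for illustration and is not copied from the Nova source.

    # Illustrative sketch only. The traceback shows the raise; the exact
    # condition below (binding:vif_type == 'binding_failed') is an assumed
    # simplification of how Neutron reports a failed binding.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        # Neutron is assumed to flag a failed binding by setting
        # binding:vif_type to 'binding_failed' on the returned port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port dict shaped like the failed port from the log above
    # (the id is from the log; the dict layout is simplified).
    port = {'id': 'd02883f7-01b1-42b3-baf7-99d9d0e95ede',
            'binding:vif_type': 'binding_failed'}

    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port d02883f7-..., please check ...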
[ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Traceback (most recent call last): [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.driver.spawn(context, instance, image_meta, [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] vm_ref = self.build_virtual_machine(instance, [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.421018] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] for vif in network_info: [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self._sync_wrapper(fn, *args, **kwargs) [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.wait() [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self[:] = self._gt.wait() [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self._exit_event.wait() [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] result = hub.switch() [ 533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
533.421360] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return self.greenlet.switch() [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] result = function(*args, **kwargs) [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] return func(*args, **kwargs) [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise e [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] nwinfo = self.network_api.allocate_for_instance( [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] created_port_ids = self._update_ports_for_instance( [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] with excutils.save_and_reraise_exception(): [ 533.421736] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] self.force_reraise() [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise self.value [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] updated_port = self._update_port( [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] _ensure_no_port_binding_failure(port) [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] raise exception.PortBindingFailed(port_id=port['id']) [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] nova.exception.PortBindingFailed: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. [ 533.422065] env[61648]: ERROR nova.compute.manager [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] [ 533.423892] env[61648]: DEBUG nova.compute.utils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 533.425279] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Build of instance 77ce1c04-88c1-4df5-9436-4f1878217c4a was re-scheduled: Binding failed for port 12f09195-5216-432e-bc0f-276a8216a482, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 533.425617] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 533.425882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.426068] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.426195] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.427380] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.936s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.682245] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 
tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.854757] env[61648]: DEBUG nova.network.neutron [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.977287] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.154858] env[61648]: DEBUG nova.network.neutron [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.185774] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 4deb5eaa-e8e2-41cc-aef9-722235e69b95] Took 1.05 seconds to deallocate network for instance. [ 534.189974] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.258891] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca148e9e-eb31-465c-9db1-7816f91ea721 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.267804] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ec9d94-53bb-4af9-8def-ed125d9ae76f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.300909] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecc869f-c0c2-4273-bb9c-0a98c93d2449 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.313064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b0d510-bb60-4a3b-b1f8-3153f7e3d606 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.326630] env[61648]: DEBUG nova.compute.provider_tree [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.513278] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.513542] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.522555] env[61648]: ERROR nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. [ 534.522555] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.522555] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.522555] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.522555] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.522555] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.522555] env[61648]: ERROR nova.compute.manager raise self.value [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.522555] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.522555] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.522555] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.523107] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.523107] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.523107] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron 
logs for more information. [ 534.523107] env[61648]: ERROR nova.compute.manager [ 534.523107] env[61648]: Traceback (most recent call last): [ 534.523107] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.523107] env[61648]: listener.cb(fileno) [ 534.523107] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.523107] env[61648]: result = function(*args, **kwargs) [ 534.523107] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 534.523107] env[61648]: return func(*args, **kwargs) [ 534.523107] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.523107] env[61648]: raise e [ 534.523107] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.523107] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 534.523107] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.523107] env[61648]: created_port_ids = self._update_ports_for_instance( [ 534.523107] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.523107] env[61648]: with excutils.save_and_reraise_exception(): [ 534.523107] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.523107] env[61648]: self.force_reraise() [ 534.523107] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.523107] env[61648]: raise self.value [ 534.523107] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.523107] env[61648]: updated_port = self._update_port( [ 534.523107] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.523107] env[61648]: _ensure_no_port_binding_failure(port) [ 534.523107] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.523107] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.524130] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. [ 534.524130] env[61648]: Removing descriptor: 19 [ 534.524130] env[61648]: ERROR nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. 
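Each of these tracebacks also passes through oslo_utils.excutils.save_and_reraise_exception, visible as the __exit__, force_reraise and "raise self.value" frames. That context manager lets cleanup code run inside an except block and then re-raises the original exception when the block exits. A small self-contained example of the pattern, using hypothetical names (update_port, update_ports_for_instance) rather than Nova's own code:

    from oslo_utils import excutils


    def update_port(port_id):
        # Stand-in for the Neutron call that fails in the log above.
        raise RuntimeError("simulated failure updating port %s" % port_id)


    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                update_port(port_id)
                created.append(port_id)
            except Exception:
                # save_and_reraise_exception records the active exception,
                # runs the body (cleanup), then re-raises it on __exit__ --
                # the force_reraise() / "raise self.value" frames in the trace.
                with excutils.save_and_reraise_exception():
                    print("rolling back ports created so far: %s" % created)


    try:
        update_ports_for_instance(['10601d2a-d3de-4fbc-ba87-2c5d7c6816df'])
    except RuntimeError as exc:
        print("re-raised as expected: %s" % exc)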
[ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Traceback (most recent call last): [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] yield resources [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.driver.spawn(context, instance, image_meta, [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.524130] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] vm_ref = self.build_virtual_machine(instance, [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] for vif in network_info: [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self._sync_wrapper(fn, *args, **kwargs) [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.wait() [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self[:] = self._gt.wait() [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self._exit_event.wait() [ 534.524461] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.524823] env[61648]: ERROR 
nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] result = hub.switch() [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self.greenlet.switch() [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] result = function(*args, **kwargs) [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return func(*args, **kwargs) [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise e [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] nwinfo = self.network_api.allocate_for_instance( [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.524823] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] created_port_ids = self._update_ports_for_instance( [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] with excutils.save_and_reraise_exception(): [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.force_reraise() [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise self.value [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] updated_port = self._update_port( [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.525175] 
env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] _ensure_no_port_binding_failure(port) [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.525175] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise exception.PortBindingFailed(port_id=port['id']) [ 534.525498] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. [ 534.525498] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] [ 534.525498] env[61648]: INFO nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Terminating instance [ 534.526823] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.527055] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.527183] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 534.559163] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquiring lock "74c00b03-bd37-49f7-b0b9-88404302c071" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.559163] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "74c00b03-bd37-49f7-b0b9-88404302c071" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.660058] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Releasing lock "refresh_cache-767c2c81-2508-4dcd-97d7-28726c2c6d31" {{(pid=61648) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.660058] env[61648]: DEBUG nova.compute.manager [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.660058] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.660058] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a65f47-e588-4d06-86c2-37dd52f9b389 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.667773] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 534.669831] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c04e0a45-4599-4ecd-b791-b1bdf45bcf47 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.675027] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 534.675027] env[61648]: value = "task-1336596" [ 534.675027] env[61648]: _type = "Task" [ 534.675027] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.685392] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.694684] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-77ce1c04-88c1-4df5-9436-4f1878217c4a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.694954] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 534.695162] env[61648]: DEBUG nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.695360] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.726036] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.834086] env[61648]: DEBUG nova.scheduler.client.report [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.051947] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.185309] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336596, 'name': PowerOffVM_Task, 'duration_secs': 0.124452} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.186104] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.187734] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 535.190018] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 535.190018] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd41558b-df9b-4ec5-8f4e-65107003c9c7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.223854] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 535.223854] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 535.224080] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleting the datastore file [datastore2] 767c2c81-2508-4dcd-97d7-28726c2c6d31 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 535.224674] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39143331-7892-4d13-8054-e8a8c9300ff9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.229956] env[61648]: DEBUG nova.network.neutron [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.242281] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for the task: (returnval){ [ 535.242281] env[61648]: value = 
"task-1336598" [ 535.242281] env[61648]: _type = "Task" [ 535.242281] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.246016] env[61648]: INFO nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Deleted allocations for instance 4deb5eaa-e8e2-41cc-aef9-722235e69b95 [ 535.269909] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.342031] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.912s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.342031] env[61648]: ERROR nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Traceback (most recent call last): [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.driver.spawn(context, instance, image_meta, [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 535.342031] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] vm_ref = self.build_virtual_machine(instance, [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] vif_infos = vmwarevif.get_vif_info(self._session, [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] for vif in network_info: [ 535.342618] 
env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self._sync_wrapper(fn, *args, **kwargs) [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.wait() [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self[:] = self._gt.wait() [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self._exit_event.wait() [ 535.342618] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] result = hub.switch() [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return self.greenlet.switch() [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] result = function(*args, **kwargs) [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] return func(*args, **kwargs) [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise e [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] nwinfo = self.network_api.allocate_for_instance( [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 535.343069] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] created_port_ids = self._update_ports_for_instance( [ 535.343405] env[61648]: ERROR nova.compute.manager 
[instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] with excutils.save_and_reraise_exception(): [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] self.force_reraise() [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise self.value [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] updated_port = self._update_port( [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] _ensure_no_port_binding_failure(port) [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 535.343405] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] raise exception.PortBindingFailed(port_id=port['id']) [ 535.343720] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] nova.exception.PortBindingFailed: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. [ 535.343720] env[61648]: ERROR nova.compute.manager [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] [ 535.343720] env[61648]: DEBUG nova.compute.utils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 535.351018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.845s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.351018] env[61648]: INFO nova.compute.claims [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.358161] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Build of instance 368c44e8-756c-4b11-8a63-9f69e007769c was re-scheduled: Binding failed for port 00d65201-eb20-4b59-ad50-d22d8c516380, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 535.359068] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 535.359068] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquiring lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.359068] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Acquired lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.359239] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 535.492438] env[61648]: DEBUG nova.compute.manager [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Received event network-changed-10601d2a-d3de-4fbc-ba87-2c5d7c6816df {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 535.492633] env[61648]: DEBUG nova.compute.manager [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Refreshing instance network info cache due to 
event network-changed-10601d2a-d3de-4fbc-ba87-2c5d7c6816df. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 535.492815] env[61648]: DEBUG oslo_concurrency.lockutils [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] Acquiring lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.688669] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.689359] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 535.689359] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 535.690065] env[61648]: DEBUG oslo_concurrency.lockutils [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] Acquired lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.690065] env[61648]: DEBUG nova.network.neutron [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Refreshing network info cache for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 535.690910] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-644a003d-aaec-4787-a3c2-20996c1af590 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.702338] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7042f2a3-232a-48f2-9d55-f5da91076c14 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.729061] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6bbb34d-86a7-4686-ba60-0e418623e9fb could not be found. 
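The PortBindingFailed tracebacks above all bottom out in nova.network.neutron._ensure_no_port_binding_failure raising the exception once Neutron reports a failed binding for the port (here 10601d2a-d3de-4fbc-ba87-2c5d7c6816df). A minimal, self-contained sketch of that check pattern is shown below; the exception class, the constant, and the port dict are illustrative stand-ins for the Nova/Neutron objects named in the log, not the actual source.

# Illustrative sketch of the binding check the tracebacks above land in.
# Stand-ins: in Nova the port dict comes from the Neutron API and the
# exception is nova.exception.PortBindingFailed.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs '
            'for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    """Raise if the port's binding state shows Neutron could not bind it."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed, as in the log above.
try:
    ensure_no_port_binding_failure(
        {'id': '10601d2a-d3de-4fbc-ba87-2c5d7c6816df',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 10601d2a-..., please check neutron logs ...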
[ 535.729061] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.729061] env[61648]: INFO nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 535.729335] env[61648]: DEBUG oslo.service.loopingcall [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.729617] env[61648]: DEBUG nova.compute.manager [-] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.729750] env[61648]: DEBUG nova.network.neutron [-] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.740017] env[61648]: INFO nova.compute.manager [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 77ce1c04-88c1-4df5-9436-4f1878217c4a] Took 1.04 seconds to deallocate network for instance. [ 535.757634] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "4deb5eaa-e8e2-41cc-aef9-722235e69b95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.333s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.767322] env[61648]: DEBUG oslo_vmware.api [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Task: {'id': task-1336598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108156} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.767800] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 535.767982] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 535.768229] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.768396] env[61648]: INFO nova.compute.manager [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Took 1.11 seconds to destroy the instance on the hypervisor. [ 535.772024] env[61648]: DEBUG oslo.service.loopingcall [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.772024] env[61648]: DEBUG nova.compute.manager [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.772024] env[61648]: DEBUG nova.network.neutron [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.777294] env[61648]: DEBUG nova.network.neutron [-] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.815688] env[61648]: DEBUG nova.network.neutron [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.892762] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.099247] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.236935] env[61648]: DEBUG nova.network.neutron [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.269200] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 536.282149] env[61648]: DEBUG nova.network.neutron [-] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.321154] env[61648]: DEBUG nova.network.neutron [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.481960] env[61648]: DEBUG nova.network.neutron [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.604495] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Releasing lock "refresh_cache-368c44e8-756c-4b11-8a63-9f69e007769c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.606142] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 536.606355] env[61648]: DEBUG nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 536.606528] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 536.634730] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.704350] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc07b174-02fd-4973-a6f8-9dbaa32f0b32 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.712260] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13baf360-7424-4504-bf5a-b02f7924fe60 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.748753] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2302ca-e9a1-4377-bd6f-099219893111 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.759963] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98aa7c3-9a18-471f-a76b-7366e9d6db4c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.776093] env[61648]: DEBUG nova.compute.provider_tree [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.786049] env[61648]: INFO nova.scheduler.client.report [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Deleted allocations for instance 77ce1c04-88c1-4df5-9436-4f1878217c4a [ 536.797773] env[61648]: INFO nova.compute.manager [-] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Took 1.07 seconds to deallocate network for instance. 
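The PowerOffVM_Task and DeleteDatastoreFile_Task entries above reflect a poll-until-complete pattern: the driver submits a vCenter task, then repeatedly reads its state ("progress is 0%" ... "completed successfully") until it finishes or fails. A simplified, generic sketch of that loop follows; fetch_task_state and the state strings are illustrative placeholders, not the oslo.vmware API.

import time

def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=60.0):
    """Poll a task's state until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = fetch_task_state(task_id)  # placeholder for the real status call
        print('Task %s progress is %d%%.' % (task_id, progress))
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError('Task %s failed' % task_id)
        time.sleep(interval)
    raise TimeoutError('Task %s did not complete within %.1fs' % (task_id, timeout))


# Example: a fake task that reports running once and then succeeds.
_polls = iter([('running', 0), ('success', 100)])
wait_for_task('task-1336596', lambda _id: next(_polls), interval=0.0)

In the driver itself this kind of loop is driven by oslo.service's looping-call machinery, which is also what the "Waiting for function ... to return" lines in the log refer to.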
[ 536.801024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.803469] env[61648]: DEBUG nova.compute.claims [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 536.803670] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.823232] env[61648]: INFO nova.compute.manager [-] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Took 1.05 seconds to deallocate network for instance. [ 536.990880] env[61648]: DEBUG oslo_concurrency.lockutils [req-4bb967aa-0d8f-4402-b806-abb6e807f976 req-6e19f363-fc07-499b-96ca-5b8c7c66a3f9 service nova] Releasing lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.139120] env[61648]: DEBUG nova.network.neutron [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.281010] env[61648]: DEBUG nova.scheduler.client.report [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.307071] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1a9daccf-e894-44a2-b1f5-3622504e5e06 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "77ce1c04-88c1-4df5-9436-4f1878217c4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.817s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.333018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.642581] env[61648]: INFO nova.compute.manager [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] [instance: 368c44e8-756c-4b11-8a63-9f69e007769c] Took 1.04 seconds to deallocate network for instance. [ 537.790182] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.790726] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 537.793945] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.518s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.812174] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.301940] env[61648]: DEBUG nova.compute.utils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.307727] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 538.307900] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 538.345509] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.454100] env[61648]: DEBUG nova.policy [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30b82480a0304b0aa24d641b7758d4ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8beeea85c1074df2a9eac7339fdad0cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 538.697020] env[61648]: INFO nova.scheduler.client.report [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Deleted allocations for instance 368c44e8-756c-4b11-8a63-9f69e007769c [ 538.810181] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 538.852584] env[61648]: WARNING nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 767c2c81-2508-4dcd-97d7-28726c2c6d31 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 538.852584] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 68e5fc5b-a843-4f49-a903-4ed145d63fd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 538.852584] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 9a03a74b-7fad-4338-ae6f-82c493cd44e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 538.852584] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d6bbb34d-86a7-4686-ba60-0e418623e9fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 538.852829] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 7cee9c25-69f4-4ceb-ba48-0cc246657fdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 538.946293] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "e918b827-ea37-4589-8999-e363aba4492d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.946815] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "e918b827-ea37-4589-8999-e363aba4492d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.099465] env[61648]: DEBUG nova.compute.manager [req-e118af9a-a230-48d5-b76b-80e876762ea4 req-2203da7c-345d-4b0b-ae74-36d167611030 service nova] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Received event network-vif-deleted-10601d2a-d3de-4fbc-ba87-2c5d7c6816df {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 539.207913] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5e9339d2-ee32-4df2-bcdc-b1007809939c tempest-InstanceActionsNegativeTestJSON-1240127009 tempest-InstanceActionsNegativeTestJSON-1240127009-project-member] Lock "368c44e8-756c-4b11-8a63-9f69e007769c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.369s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.250928] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Successfully created port: 1531534c-0121-491e-8821-21995c088b37 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.298298] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "16e92598-3eba-42c1-b9ec-3b2b91231267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.298712] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "16e92598-3eba-42c1-b9ec-3b2b91231267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.359130] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance f03f349e-d5ed-437b-8b13-6b036f2b88dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 539.713932] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 539.827897] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 539.862192] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 3c252464-1b1a-4c1a-86bb-2fb0107aa52f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 539.868021] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.868021] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.868021] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.868271] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.868271] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.868271] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.868271] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.868412] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 539.868623] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.868821] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.869173] env[61648]: DEBUG nova.virt.hardware [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.870354] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009cefd2-d55e-4f45-aebd-52b5ffd5b13a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.883988] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda1c057-7e54-4991-801e-a767a3ab876a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.990251] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "25168ea9-24fb-4f63-b508-b5c3a47a77e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.990486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "25168ea9-24fb-4f63-b508-b5c3a47a77e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.246186] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.376825] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 540.738112] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "d23a9ab2-01ed-4d41-b89f-445ecc5f410f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.738112] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "d23a9ab2-01ed-4d41-b89f-445ecc5f410f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.884685] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d186a201-4ef8-40a6-9625-5e8ae99af4d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 541.227422] env[61648]: ERROR nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. 
[ 541.227422] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.227422] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.227422] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.227422] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.227422] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.227422] env[61648]: ERROR nova.compute.manager raise self.value [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.227422] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.227422] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.227422] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.228507] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.228507] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.228507] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. 
[ 541.228507] env[61648]: ERROR nova.compute.manager [ 541.228507] env[61648]: Traceback (most recent call last): [ 541.228507] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.228507] env[61648]: listener.cb(fileno) [ 541.228507] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.228507] env[61648]: result = function(*args, **kwargs) [ 541.228507] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.228507] env[61648]: return func(*args, **kwargs) [ 541.228507] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.228507] env[61648]: raise e [ 541.228507] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.228507] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 541.228507] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.228507] env[61648]: created_port_ids = self._update_ports_for_instance( [ 541.228507] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.228507] env[61648]: with excutils.save_and_reraise_exception(): [ 541.228507] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.228507] env[61648]: self.force_reraise() [ 541.228507] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.228507] env[61648]: raise self.value [ 541.228507] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.228507] env[61648]: updated_port = self._update_port( [ 541.228507] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.228507] env[61648]: _ensure_no_port_binding_failure(port) [ 541.228507] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.228507] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.229274] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. [ 541.229274] env[61648]: Removing descriptor: 16 [ 541.229274] env[61648]: ERROR nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. 
[ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Traceback (most recent call last): [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] yield resources [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.driver.spawn(context, instance, image_meta, [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.229274] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] vm_ref = self.build_virtual_machine(instance, [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] for vif in network_info: [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self._sync_wrapper(fn, *args, **kwargs) [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.wait() [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self[:] = self._gt.wait() [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self._exit_event.wait() [ 541.229735] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.230101] env[61648]: ERROR 
nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] result = hub.switch() [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self.greenlet.switch() [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] result = function(*args, **kwargs) [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return func(*args, **kwargs) [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise e [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] nwinfo = self.network_api.allocate_for_instance( [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.230101] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] created_port_ids = self._update_ports_for_instance( [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] with excutils.save_and_reraise_exception(): [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.force_reraise() [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise self.value [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] updated_port = self._update_port( [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.230476] 
env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] _ensure_no_port_binding_failure(port) [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.230476] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise exception.PortBindingFailed(port_id=port['id']) [ 541.230833] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. [ 541.230833] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] [ 541.230833] env[61648]: INFO nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Terminating instance [ 541.234020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.234197] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.234366] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 541.389330] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance c2dadf90-2469-4df6-bcc4-dd65d8a748bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 541.628823] env[61648]: DEBUG nova.compute.manager [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Received event network-changed-1531534c-0121-491e-8821-21995c088b37 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 541.628823] env[61648]: DEBUG nova.compute.manager [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Refreshing instance network info cache due to event network-changed-1531534c-0121-491e-8821-21995c088b37. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 541.628823] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] Acquiring lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.775068] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.895274] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 66562ea6-5d39-4b98-a9e2-0512295ab94f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 542.044981] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.397125] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e51d3f4e-41d5-4190-a8e9-21c743aa3b5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 542.549091] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.549571] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 542.549767] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 542.550087] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] Acquired lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.550255] env[61648]: DEBUG nova.network.neutron [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Refreshing network info cache for port 1531534c-0121-491e-8821-21995c088b37 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 542.551282] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baab03fa-1a68-4f99-9abf-111d797b2978 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.570087] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60b3080-43a6-4c77-9c50-98afb9e990a0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.600021] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cee9c25-69f4-4ceb-ba48-0cc246657fdf could not be found. [ 542.600021] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 542.600021] env[61648]: INFO nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Took 0.05 seconds to destroy the instance on the hypervisor. [ 542.600021] env[61648]: DEBUG oslo.service.loopingcall [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.600021] env[61648]: DEBUG nova.compute.manager [-] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 542.600021] env[61648]: DEBUG nova.network.neutron [-] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 542.628330] env[61648]: DEBUG nova.network.neutron [-] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.901672] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 74c00b03-bd37-49f7-b0b9-88404302c071 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.052184] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "d1713c19-45cc-4d33-8b23-a9516bbaa25f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.053069] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "d1713c19-45cc-4d33-8b23-a9516bbaa25f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.093677] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "87971b67-572c-4d5f-99b7-dab08aea10c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.093677] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "87971b67-572c-4d5f-99b7-dab08aea10c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.135307] env[61648]: DEBUG nova.network.neutron [-] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.167536] env[61648]: DEBUG nova.network.neutron [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service 
nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.320793] env[61648]: DEBUG nova.network.neutron [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.406070] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e918b827-ea37-4589-8999-e363aba4492d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.641758] env[61648]: INFO nova.compute.manager [-] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Took 1.04 seconds to deallocate network for instance. [ 543.643632] env[61648]: DEBUG nova.compute.claims [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 543.644157] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.824018] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9890e9d-9279-429e-828e-e884cdde2985 req-eafb796b-0ce9-430f-ba9d-0cd55b31d291 service nova] Releasing lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.910274] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 16e92598-3eba-42c1-b9ec-3b2b91231267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 543.910624] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 543.910680] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 544.229040] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5649c933-d1f7-4993-8a79-2d4588a4b6f3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.239766] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a8b549-893a-41d2-8f0a-0cfe782ae5e9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.273466] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0faaa2-9429-4b3f-b386-f6e72d194eed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.282257] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db852af6-37da-4545-8b27-f1674e30fa94 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.298773] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.803644] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.310408] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 545.310679] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.517s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.310943] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.147s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.698122] env[61648]: DEBUG nova.compute.manager [req-0e863855-9ad4-4a30-b36c-9ce4d29c867a req-60f7b658-9026-45fa-b054-3c1cfed510af service nova] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Received event network-vif-deleted-1531534c-0121-491e-8821-21995c088b37 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.170807] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c71cbb-10f5-4c42-b5e2-c17f5e2d5599 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.180128] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a34d84-3311-46b5-9e11-92269072d009 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.214888] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c42e8b-bb39-48cf-af7e-5a1e946ad321 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.223244] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97534474-d8d9-42af-aa5e-01a075e1d213 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.239451] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 546.767028] env[61648]: ERROR nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [req-c08be465-01ec-4579-9c72-4eacad175ecd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c08be465-01ec-4579-9c72-4eacad175ecd"}]}: nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. [ 546.789963] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 546.817288] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 546.817523] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 546.840308] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 546.879123] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 547.385936] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2eb1d64-6575-48b1-a94e-2173bedc8823 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.395830] env[61648]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad5643d-55cc-4756-b381-fd40dc3bf7a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.438598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5d449-1de7-422c-9dab-36e846001edd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.446272] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d8940d-dc4a-4853-ad3f-e16879b65769 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.466101] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 548.015454] env[61648]: ERROR nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [req-6ad5eb8f-240d-43ae-be16-58dbccf82535] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6ad5eb8f-240d-43ae-be16-58dbccf82535"}]}: nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. 
[ 548.050025] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 548.082615] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 548.084025] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 548.102921] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: ceb63cda-af03-4a4f-9028-32769b581a0c {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 548.141074] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 548.594476] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8960a8cc-5091-4f3a-bc1c-33fbca606d38 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.601087] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02c104d-8d96-4b47-a583-a295524b435d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.644276] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5af4ba-53ba-49fd-8f5e-8d2b00e25689 {{(pid=61648) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.653345] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1384af41-bab5-4c1b-bf9f-63a55ec38534 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.669366] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 549.236272] env[61648]: DEBUG nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 32 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 549.236597] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 32 to 33 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 549.240016] env[61648]: DEBUG nova.compute.provider_tree [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 549.753788] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 4.443s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.754482] env[61648]: ERROR nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 
tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Traceback (most recent call last): [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.driver.spawn(context, instance, image_meta, [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] vm_ref = self.build_virtual_machine(instance, [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.754482] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] for vif in network_info: [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self._sync_wrapper(fn, *args, **kwargs) [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.wait() [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self[:] = self._gt.wait() [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self._exit_event.wait() [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.756341] 
env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] result = hub.switch() [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.756341] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return self.greenlet.switch() [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] result = function(*args, **kwargs) [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] return func(*args, **kwargs) [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise e [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] nwinfo = self.network_api.allocate_for_instance( [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] created_port_ids = self._update_ports_for_instance( [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] with excutils.save_and_reraise_exception(): [ 549.756772] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] self.force_reraise() [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise self.value [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] updated_port = self._update_port( [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] _ensure_no_port_binding_failure(port) [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] raise exception.PortBindingFailed(port_id=port['id']) [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] nova.exception.PortBindingFailed: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. [ 549.757156] env[61648]: ERROR nova.compute.manager [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] [ 549.757486] env[61648]: DEBUG nova.compute.utils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 549.757486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.213s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.759032] env[61648]: INFO nova.compute.claims [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.765304] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Build of instance 68e5fc5b-a843-4f49-a903-4ed145d63fd7 was re-scheduled: Binding failed for port 243befa8-9485-409f-bc25-a981f5c4d0bc, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 549.765304] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 549.766459] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.766459] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquired lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.766459] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.294228] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.591449] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.714756] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "3cd90969-d884-49fc-a2c3-8501e2c51ff6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.715010] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "3cd90969-d884-49fc-a2c3-8501e2c51ff6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.101496] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Releasing lock "refresh_cache-68e5fc5b-a843-4f49-a903-4ed145d63fd7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.101496] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 551.101496] env[61648]: DEBUG nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.101496] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.139566] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9f22c2-b089-4d85-bd96-aee798d8c1ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.152610] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4a18c4-f53a-4c63-9916-163bd8d000c5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.165067] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.202635] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828cc22b-2e4b-44c5-9a35-8b7ae55722ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.214946] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45893812-ef94-40b0-9482-aa9a1ccbfd3e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.238967] env[61648]: DEBUG nova.compute.provider_tree [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.518880] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquiring lock "49284c45-b77b-4992-8437-d9d31ba12539" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.518880] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "49284c45-b77b-4992-8437-d9d31ba12539" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.668577] env[61648]: DEBUG nova.network.neutron [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.742245] env[61648]: DEBUG nova.scheduler.client.report [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.170946] env[61648]: INFO nova.compute.manager [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: 68e5fc5b-a843-4f49-a903-4ed145d63fd7] Took 1.07 seconds to deallocate network for instance. [ 552.252404] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.252914] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 552.256495] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.019s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.258018] env[61648]: INFO nova.compute.claims [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.762069] env[61648]: DEBUG nova.compute.utils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.769578] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 552.769578] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 552.869476] env[61648]: DEBUG nova.policy [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0ab99bb72a14593868586021ab3f515', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1cb2598a007b4973814e853c84cb4413', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 553.208418] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquiring lock "baf240b6-0a42-485f-9176-78dda5de3c7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.209041] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock "baf240b6-0a42-485f-9176-78dda5de3c7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.239192] env[61648]: INFO nova.scheduler.client.report [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Deleted allocations for instance 68e5fc5b-a843-4f49-a903-4ed145d63fd7 [ 553.269794] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 553.550374] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Successfully created port: 5b10ac06-87b4-4ff9-b72f-093167b1d8cb {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.739700] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dabb7b-8d78-4f4a-a72a-b986d6fe914a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.754346] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14948a2f-e562-4662-9dd9-3ff0f4901152 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.759486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-718f89a6-0081-4ced-89bf-57f238ee75e0 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "68e5fc5b-a843-4f49-a903-4ed145d63fd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.991s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.798142] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe1459d-f6b6-4620-a274-946157c81283 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.807911] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a42be5-59bd-4bb1-9845-ea317d99b194 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.825703] env[61648]: DEBUG nova.compute.provider_tree [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.963600] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquiring lock "453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.965363] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.262582] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 554.299764] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 554.330505] env[61648]: DEBUG nova.scheduler.client.report [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 554.335910] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.335910] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.336134] env[61648]: DEBUG nova.virt.hardware [None 
req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.336250] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.336395] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.336542] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.336744] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.336901] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 554.337074] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 554.337235] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 554.337763] env[61648]: DEBUG nova.virt.hardware [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 554.338290] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0b6558-19fb-498a-a81b-9738b1bcf2a8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.350641] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fbd513-003f-4509-b9e9-c0b5d8d06f9b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.798357] 
env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.844019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.845042] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.848389] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.553s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.356305] env[61648]: DEBUG nova.compute.utils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.362978] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 555.364563] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 555.470107] env[61648]: DEBUG nova.policy [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86210ecf67644c95a6f4cbe574482c59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77ce49a8acfa4ae2a63ebaac18f2b1ad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 555.479022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquiring lock "c35f0f15-6c13-4fbe-9ac2-ab6262590c38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.479154] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "c35f0f15-6c13-4fbe-9ac2-ab6262590c38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.570935] env[61648]: ERROR nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. 
[ 555.570935] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.570935] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 555.570935] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 555.570935] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.570935] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.570935] env[61648]: ERROR nova.compute.manager raise self.value [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 555.570935] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 555.570935] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.570935] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 555.571532] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.571532] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 555.571532] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. 
[ 555.571532] env[61648]: ERROR nova.compute.manager [ 555.572906] env[61648]: Traceback (most recent call last): [ 555.573055] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 555.573055] env[61648]: listener.cb(fileno) [ 555.573176] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.573176] env[61648]: result = function(*args, **kwargs) [ 555.573258] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 555.573258] env[61648]: return func(*args, **kwargs) [ 555.573332] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 555.573332] env[61648]: raise e [ 555.573405] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.573405] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 555.573481] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 555.573481] env[61648]: created_port_ids = self._update_ports_for_instance( [ 555.574168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 555.574168] env[61648]: with excutils.save_and_reraise_exception(): [ 555.574168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.574168] env[61648]: self.force_reraise() [ 555.574168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.574168] env[61648]: raise self.value [ 555.574168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 555.574168] env[61648]: updated_port = self._update_port( [ 555.574168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.574168] env[61648]: _ensure_no_port_binding_failure(port) [ 555.574168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.574168] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 555.574168] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. [ 555.574168] env[61648]: Removing descriptor: 16 [ 555.575050] env[61648]: ERROR nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. 
[ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Traceback (most recent call last): [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] yield resources [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.driver.spawn(context, instance, image_meta, [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] vm_ref = self.build_virtual_machine(instance, [ 555.575050] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] for vif in network_info: [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self._sync_wrapper(fn, *args, **kwargs) [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.wait() [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self[:] = self._gt.wait() [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self._exit_event.wait() [ 555.575481] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 555.575481] env[61648]: ERROR 
nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] result = hub.switch() [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self.greenlet.switch() [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] result = function(*args, **kwargs) [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return func(*args, **kwargs) [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise e [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] nwinfo = self.network_api.allocate_for_instance( [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] created_port_ids = self._update_ports_for_instance( [ 555.575973] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] with excutils.save_and_reraise_exception(): [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.force_reraise() [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise self.value [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] updated_port = self._update_port( [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.576444] 
env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] _ensure_no_port_binding_failure(port) [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise exception.PortBindingFailed(port_id=port['id']) [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. [ 555.576444] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] [ 555.579650] env[61648]: INFO nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Terminating instance [ 555.583596] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.583767] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquired lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.583932] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 555.847803] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2547eb-080f-4c46-a5b4-ceba50a1cd3f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.855165] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e40568b-4188-4045-a3f8-f61e11109ece {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.900744] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.904892] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec583f70-5f58-41b7-a200-424855e0af8f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.913772] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd0f972-eada-4c52-b1b6-7c20811d1910 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.930901] env[61648]: DEBUG nova.compute.provider_tree [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.112015] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.289744] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.384540] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.384824] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.435891] env[61648]: DEBUG nova.scheduler.client.report [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.569644] env[61648]: 
DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Successfully created port: 66a46ce9-a11c-41e9-a22e-9650b67e2740 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 556.794644] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Releasing lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.795073] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 556.795295] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 556.795588] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2077791b-c6be-4dcf-985f-d46c29b39a6e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.809129] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa657c7f-d6b6-4c21-8a43-83e9ddf1518b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.841989] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f03f349e-d5ed-437b-8b13-6b036f2b88dc could not be found. [ 556.841989] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 556.841989] env[61648]: INFO nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 556.842291] env[61648]: DEBUG oslo.service.loopingcall [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.842437] env[61648]: DEBUG nova.compute.manager [-] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 556.842527] env[61648]: DEBUG nova.network.neutron [-] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 556.883182] env[61648]: DEBUG nova.network.neutron [-] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.911632] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.943624] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.095s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.944464] env[61648]: ERROR nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. 
[ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Traceback (most recent call last): [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.driver.spawn(context, instance, image_meta, [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] vm_ref = self.build_virtual_machine(instance, [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.944464] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] for vif in network_info: [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self._sync_wrapper(fn, *args, **kwargs) [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.wait() [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self[:] = self._gt.wait() [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self._exit_event.wait() [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] result = hub.switch() [ 556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
556.945145] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return self.greenlet.switch() [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] result = function(*args, **kwargs) [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] return func(*args, **kwargs) [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise e [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] nwinfo = self.network_api.allocate_for_instance( [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] created_port_ids = self._update_ports_for_instance( [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] with excutils.save_and_reraise_exception(): [ 556.945532] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] self.force_reraise() [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise self.value [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] updated_port = self._update_port( [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] _ensure_no_port_binding_failure(port) [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] raise exception.PortBindingFailed(port_id=port['id']) [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] nova.exception.PortBindingFailed: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. [ 556.946016] env[61648]: ERROR nova.compute.manager [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] [ 556.946363] env[61648]: DEBUG nova.compute.utils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 556.949322] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.531s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.949776] env[61648]: INFO nova.compute.claims [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.956654] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.957904] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.957904] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.957904] 
env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.960019] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.960019] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.960019] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.960019] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.960019] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.960387] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.960387] env[61648]: DEBUG nova.virt.hardware [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.960387] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Build of instance 9a03a74b-7fad-4338-ae6f-82c493cd44e3 was re-scheduled: Binding failed for port a2ceba79-7d63-4f21-bfa4-ac44048c6d20, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 556.960525] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 556.962372] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquiring lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.962372] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Acquired lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.962372] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.964906] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc58a7a1-1dfd-4de9-8d03-9a75644966fc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.981958] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e25bf12-efab-4ae1-8026-48613ac34b8e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.033827] env[61648]: DEBUG nova.compute.manager [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Received event network-changed-5b10ac06-87b4-4ff9-b72f-093167b1d8cb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 557.034111] env[61648]: DEBUG nova.compute.manager [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Refreshing instance network info cache due to event network-changed-5b10ac06-87b4-4ff9-b72f-093167b1d8cb. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 557.034321] env[61648]: DEBUG oslo_concurrency.lockutils [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] Acquiring lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.034464] env[61648]: DEBUG oslo_concurrency.lockutils [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] Acquired lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.034621] env[61648]: DEBUG nova.network.neutron [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Refreshing network info cache for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 557.193865] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "8c8a339c-e52a-4257-9191-4e03ecf87b22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.194110] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "8c8a339c-e52a-4257-9191-4e03ecf87b22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.386337] env[61648]: DEBUG nova.network.neutron [-] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.506154] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.575876] env[61648]: DEBUG nova.network.neutron [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.663018] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.787962] env[61648]: DEBUG nova.network.neutron [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.889313] env[61648]: INFO nova.compute.manager [-] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Took 1.05 seconds to deallocate network for instance. [ 557.895318] env[61648]: DEBUG nova.compute.claims [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 557.895474] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.171693] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Releasing lock "refresh_cache-9a03a74b-7fad-4338-ae6f-82c493cd44e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.172595] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 558.172595] env[61648]: DEBUG nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 558.172595] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 558.225433] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.290752] env[61648]: DEBUG oslo_concurrency.lockutils [req-aa5b8e7d-1afc-4c05-8989-7cd59d06f57e req-57dd1f0a-bbe4-4d8d-a865-e1b48754d338 service nova] Releasing lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.523661] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fc580e-fd02-4ea9-ac2d-6138f7f5d088 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.531971] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1862a1-9782-4c31-9044-3d7e6f7e5821 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.565685] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0e215c-b794-44b5-b332-7815e3f96a99 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.577786] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3158c413-86af-41c5-b019-31691e2b28e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.591542] env[61648]: DEBUG nova.compute.provider_tree [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.733733] env[61648]: DEBUG nova.network.neutron [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.868750] env[61648]: ERROR nova.compute.manager [None 
req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. [ 558.868750] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.868750] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.868750] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.868750] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.868750] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.868750] env[61648]: ERROR nova.compute.manager raise self.value [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.868750] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 558.868750] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.868750] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 558.869303] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.869303] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 558.869303] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. 
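The tracebacks above all bottom out in the same frame, nova/network/neutron.py line 294, where _ensure_no_port_binding_failure raises PortBindingFailed once Neutron reports the port as unbindable. A minimal standalone sketch of that check, assuming the 'binding_failed' vif_type marker and a simplified exception class (illustrative only, not the actual nova module):

    # Simplified sketch of the check behind the PortBindingFailed errors above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker set by Neutron

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron flags a port its mechanism drivers could not bind via
        # binding:vif_type; Nova converts that into the exception seen in
        # the tracebacks.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])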
[ 558.869303] env[61648]: ERROR nova.compute.manager [ 558.869303] env[61648]: Traceback (most recent call last): [ 558.869303] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 558.869303] env[61648]: listener.cb(fileno) [ 558.869303] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.869303] env[61648]: result = function(*args, **kwargs) [ 558.869303] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.869303] env[61648]: return func(*args, **kwargs) [ 558.869303] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.869303] env[61648]: raise e [ 558.869303] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.869303] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 558.869303] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.869303] env[61648]: created_port_ids = self._update_ports_for_instance( [ 558.869303] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.869303] env[61648]: with excutils.save_and_reraise_exception(): [ 558.869303] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.869303] env[61648]: self.force_reraise() [ 558.869303] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.869303] env[61648]: raise self.value [ 558.869303] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.869303] env[61648]: updated_port = self._update_port( [ 558.869303] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.869303] env[61648]: _ensure_no_port_binding_failure(port) [ 558.869303] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.869303] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 558.870173] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. [ 558.870173] env[61648]: Removing descriptor: 19 [ 558.870173] env[61648]: ERROR nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. 
[ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Traceback (most recent call last): [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] yield resources [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.driver.spawn(context, instance, image_meta, [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.870173] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] vm_ref = self.build_virtual_machine(instance, [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] for vif in network_info: [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self._sync_wrapper(fn, *args, **kwargs) [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.wait() [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self[:] = self._gt.wait() [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self._exit_event.wait() [ 558.870575] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.871019] env[61648]: ERROR 
nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] result = hub.switch() [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self.greenlet.switch() [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] result = function(*args, **kwargs) [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return func(*args, **kwargs) [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise e [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] nwinfo = self.network_api.allocate_for_instance( [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.871019] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] created_port_ids = self._update_ports_for_instance( [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] with excutils.save_and_reraise_exception(): [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.force_reraise() [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise self.value [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] updated_port = self._update_port( [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.871409] 
env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] _ensure_no_port_binding_failure(port) [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.871409] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise exception.PortBindingFailed(port_id=port['id']) [ 558.871786] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. [ 558.871786] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] [ 558.871786] env[61648]: INFO nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Terminating instance [ 558.872486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquiring lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.872570] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquired lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.872716] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 559.096137] env[61648]: DEBUG nova.scheduler.client.report [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.244215] env[61648]: INFO nova.compute.manager [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] [instance: 9a03a74b-7fad-4338-ae6f-82c493cd44e3] Took 1.07 seconds to deallocate network for instance. 
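The inventory payload logged above for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 translates into effective scheduling capacity via the usual placement formula, capacity = (total - reserved) * allocation_ratio. A quick check against the logged numbers (the dict below is trimmed from the log; min_unit/max_unit/step_size are omitted because they do not enter this calculation):

    # Inventory as reported in the log for provider 1f7892d3-...-a2c5eb65d4d0.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0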
[ 559.255888] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.256391] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.412136] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.515185] env[61648]: DEBUG nova.compute.manager [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Received event network-changed-66a46ce9-a11c-41e9-a22e-9650b67e2740 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 559.515185] env[61648]: DEBUG nova.compute.manager [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Refreshing instance network info cache due to event network-changed-66a46ce9-a11c-41e9-a22e-9650b67e2740. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 559.515185] env[61648]: DEBUG oslo_concurrency.lockutils [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] Acquiring lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.601115] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.601115] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 559.604415] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.806s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.606332] env[61648]: INFO nova.compute.claims [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.627721] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.051385] env[61648]: DEBUG nova.compute.manager [req-8ac03844-b5a8-421d-9b3a-8254da218e0d req-323697f8-f6bc-492a-bca3-6c0e04fafcc1 service nova] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Received event network-vif-deleted-5b10ac06-87b4-4ff9-b72f-093167b1d8cb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 560.111565] env[61648]: DEBUG nova.compute.utils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 560.114527] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 560.114779] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 560.135069] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Releasing lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.135069] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 560.135069] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 560.135069] env[61648]: DEBUG oslo_concurrency.lockutils [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] Acquired lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.135069] env[61648]: DEBUG nova.network.neutron [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Refreshing network info cache for port 66a46ce9-a11c-41e9-a22e-9650b67e2740 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 560.135333] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0e0c6c5-34fa-4831-b73e-951cea6ae58b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.146332] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fc8446-c843-4a0b-8619-8ac344aafff2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.170094] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3c252464-1b1a-4c1a-86bb-2fb0107aa52f could not be found. [ 560.170453] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 560.171141] env[61648]: INFO nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 560.171141] env[61648]: DEBUG oslo.service.loopingcall [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.171373] env[61648]: DEBUG nova.compute.manager [-] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.171583] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 560.295055] env[61648]: INFO nova.scheduler.client.report [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Deleted allocations for instance 9a03a74b-7fad-4338-ae6f-82c493cd44e3 [ 560.306152] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.423908] env[61648]: DEBUG nova.policy [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f945b6cf4fa3454fa9e94f861fb0c134', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '242c8ed692cb40daa0d7db3d00629278', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 560.484340] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquiring lock "33e270fd-0393-4425-8312-1e9fc91f3d1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.484849] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "33e270fd-0393-4425-8312-1e9fc91f3d1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.619692] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 560.682829] env[61648]: DEBUG nova.network.neutron [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.806362] env[61648]: DEBUG oslo_concurrency.lockutils [None req-37df9084-59c4-46c9-9894-584c2945464c tempest-ServerDiagnosticsNegativeTest-1430962000 tempest-ServerDiagnosticsNegativeTest-1430962000-project-member] Lock "9a03a74b-7fad-4338-ae6f-82c493cd44e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.406s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.811593] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.986317] env[61648]: DEBUG nova.network.neutron [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.213817] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e81a91-61a4-46a1-9d55-51849211e453 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.222084] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072a2d89-f432-48fe-bb9b-1ef3281c39d2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.258295] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f72e9a-4957-43e1-8cda-d9ea4329a548 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.266214] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5967c04b-3dea-4dd8-a9c7-fae20dbb975f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.282594] env[61648]: DEBUG nova.compute.provider_tree [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.312732] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 561.315900] env[61648]: INFO nova.compute.manager [-] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Took 1.14 seconds to deallocate network for instance. 
[ 561.319137] env[61648]: DEBUG nova.compute.claims [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 561.319311] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.344471] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Successfully created port: c460a3c2-72e9-4b13-a346-e7d0ac4a12ed {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 561.492432] env[61648]: DEBUG oslo_concurrency.lockutils [req-850c0b54-7571-43c6-bfba-5e0b34ab42bb req-b0298676-e970-4770-9a38-27948385842a service nova] Releasing lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.635427] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 561.667523] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1902165863',id=25,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-386921556',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.670696] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.670696] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.670696] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.670696] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.670696] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.671058] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.671058] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 
tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.671058] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.671058] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.671058] env[61648]: DEBUG nova.virt.hardware [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.671222] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f775e2d-e159-420a-9e28-94b08a563b7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.682419] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cde703-91ca-4e85-8f4e-78fb5f7e5b94 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.790156] env[61648]: DEBUG nova.scheduler.client.report [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.844431] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.078851] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "be7776f1-0083-4c40-a7e6-477c0c65f7bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.079092] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "be7776f1-0083-4c40-a7e6-477c0c65f7bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.298265] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.298792] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 562.303256] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.499s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.473871] env[61648]: DEBUG nova.compute.manager [req-060d0266-b3dc-4915-ab1b-3aa56667ccf1 req-71ee50bb-8c2a-4fa3-a0c2-a3df2237bb2b service nova] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Received event network-vif-deleted-66a46ce9-a11c-41e9-a22e-9650b67e2740 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 562.810015] env[61648]: DEBUG nova.compute.utils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.817664] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 562.817900] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 562.954675] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquiring lock "9008460a-6b35-468d-803c-d10c139494f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.958351] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "9008460a-6b35-468d-803c-d10c139494f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.991094] env[61648]: DEBUG nova.policy [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30b82480a0304b0aa24d641b7758d4ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8beeea85c1074df2a9eac7339fdad0cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 563.316873] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 563.349816] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f127c40d-d51c-45bd-8b58-e4700be86d0f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.361164] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02cf505-8bbb-4d01-839b-35c098c01b1b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.392542] env[61648]: ERROR nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. 
[ 563.392542] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.392542] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.392542] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.392542] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.392542] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.392542] env[61648]: ERROR nova.compute.manager raise self.value [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.392542] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.392542] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.392542] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.394822] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.394822] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.394822] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. 
[ 563.394822] env[61648]: ERROR nova.compute.manager [ 563.394822] env[61648]: Traceback (most recent call last): [ 563.394822] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.394822] env[61648]: listener.cb(fileno) [ 563.394822] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.394822] env[61648]: result = function(*args, **kwargs) [ 563.394822] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 563.394822] env[61648]: return func(*args, **kwargs) [ 563.394822] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.394822] env[61648]: raise e [ 563.394822] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.394822] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 563.394822] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.394822] env[61648]: created_port_ids = self._update_ports_for_instance( [ 563.394822] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.394822] env[61648]: with excutils.save_and_reraise_exception(): [ 563.394822] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.394822] env[61648]: self.force_reraise() [ 563.394822] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.394822] env[61648]: raise self.value [ 563.394822] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.394822] env[61648]: updated_port = self._update_port( [ 563.394822] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.394822] env[61648]: _ensure_no_port_binding_failure(port) [ 563.394822] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.394822] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.396107] env[61648]: nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. [ 563.396107] env[61648]: Removing descriptor: 19 [ 563.396107] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c0b98b-1645-4b0d-87af-aea2793940d3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.397213] env[61648]: ERROR nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. 
[ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Traceback (most recent call last): [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] yield resources [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.driver.spawn(context, instance, image_meta, [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] vm_ref = self.build_virtual_machine(instance, [ 563.397213] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] for vif in network_info: [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self._sync_wrapper(fn, *args, **kwargs) [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.wait() [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self[:] = self._gt.wait() [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self._exit_event.wait() [ 563.397743] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.397743] env[61648]: ERROR 
nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] result = hub.switch() [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self.greenlet.switch() [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] result = function(*args, **kwargs) [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return func(*args, **kwargs) [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise e [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] nwinfo = self.network_api.allocate_for_instance( [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] created_port_ids = self._update_ports_for_instance( [ 563.400234] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] with excutils.save_and_reraise_exception(): [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.force_reraise() [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise self.value [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] updated_port = self._update_port( [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.400642] 
env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] _ensure_no_port_binding_failure(port) [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise exception.PortBindingFailed(port_id=port['id']) [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. [ 563.400642] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] [ 563.401147] env[61648]: INFO nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Terminating instance [ 563.401147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquiring lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.401147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquired lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.401147] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 563.408176] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4ff433-8437-4db5-b81a-49ae1eab51f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.424469] env[61648]: DEBUG nova.compute.provider_tree [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.537627] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Successfully created port: ac3d7b2e-1aed-46e9-9873-bf19232cff48 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 563.929454] env[61648]: DEBUG nova.scheduler.client.report [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 
tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.938940] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.332869] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 564.342689] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.369448] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.369448] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.369448] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.369668] env[61648]: DEBUG 
nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.370082] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.371972] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.372809] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.373527] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.373923] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.374656] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.375034] env[61648]: DEBUG nova.virt.hardware [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.377118] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7d4004-447c-4198-91ac-160599c79781 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.386467] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43adb06f-e27d-42c5-82b6-2e8942bfa868 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.443826] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.141s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.444561] env[61648]: ERROR nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Traceback (most recent call last): [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.driver.spawn(context, instance, image_meta, [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] vm_ref = self.build_virtual_machine(instance, [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.444561] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] for vif in network_info: [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self._sync_wrapper(fn, *args, **kwargs) [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.wait() [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self[:] = self._gt.wait() [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.445024] env[61648]: ERROR 
nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self._exit_event.wait() [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] result = hub.switch() [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.445024] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return self.greenlet.switch() [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] result = function(*args, **kwargs) [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] return func(*args, **kwargs) [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise e [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] nwinfo = self.network_api.allocate_for_instance( [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] created_port_ids = self._update_ports_for_instance( [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] with excutils.save_and_reraise_exception(): [ 564.445450] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] self.force_reraise() [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise self.value [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance 
[ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] updated_port = self._update_port( [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] _ensure_no_port_binding_failure(port) [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] raise exception.PortBindingFailed(port_id=port['id']) [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] nova.exception.PortBindingFailed: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. [ 564.445865] env[61648]: ERROR nova.compute.manager [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] [ 564.447594] env[61648]: DEBUG nova.compute.utils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 564.447594] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.114s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.447594] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.448874] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.104s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.450547] env[61648]: INFO nova.compute.claims [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.453895] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Build of instance d6bbb34d-86a7-4686-ba60-0e418623e9fb was 
re-scheduled: Binding failed for port 10601d2a-d3de-4fbc-ba87-2c5d7c6816df, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 564.454071] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 564.454366] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.454366] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.454548] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.489230] env[61648]: INFO nova.scheduler.client.report [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Deleted allocations for instance 767c2c81-2508-4dcd-97d7-28726c2c6d31 [ 564.841999] env[61648]: DEBUG nova.compute.manager [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Received event network-changed-c460a3c2-72e9-4b13-a346-e7d0ac4a12ed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 564.841999] env[61648]: DEBUG nova.compute.manager [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Refreshing instance network info cache due to event network-changed-c460a3c2-72e9-4b13-a346-e7d0ac4a12ed. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 564.841999] env[61648]: DEBUG oslo_concurrency.lockutils [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] Acquiring lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.848251] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Releasing lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.848251] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 564.848251] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 564.848251] env[61648]: DEBUG oslo_concurrency.lockutils [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] Acquired lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.848251] env[61648]: DEBUG nova.network.neutron [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Refreshing network info cache for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.848439] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b8db1de-82d7-4ebd-bee3-822038aa5245 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.868784] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b756d0c9-7424-460f-a0d3-3a50f1107174 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.893232] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b could not be found. 
[ 564.893520] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 564.893699] env[61648]: INFO nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 564.893931] env[61648]: DEBUG oslo.service.loopingcall [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.894185] env[61648]: DEBUG nova.compute.manager [-] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.894274] env[61648]: DEBUG nova.network.neutron [-] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 564.929250] env[61648]: DEBUG nova.network.neutron [-] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.993811] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.002795] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ca23656-a5c7-4000-906a-d4d125026a2a tempest-ServerDiagnosticsV248Test-226141477 tempest-ServerDiagnosticsV248Test-226141477-project-member] Lock "767c2c81-2508-4dcd-97d7-28726c2c6d31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.681s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.181135] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.380357] env[61648]: DEBUG nova.network.neutron [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.433787] env[61648]: DEBUG nova.network.neutron [-] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.533464] env[61648]: DEBUG nova.network.neutron [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.685553] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-d6bbb34d-86a7-4686-ba60-0e418623e9fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.685821] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 565.686011] env[61648]: DEBUG nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 565.686216] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 565.718289] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.736255] env[61648]: ERROR nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. 
[ 565.736255] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.736255] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.736255] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.736255] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.736255] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.736255] env[61648]: ERROR nova.compute.manager raise self.value [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.736255] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 565.736255] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.736255] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 565.737092] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.737092] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 565.737092] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. 
[ 565.737092] env[61648]: ERROR nova.compute.manager [ 565.737092] env[61648]: Traceback (most recent call last): [ 565.737092] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 565.737092] env[61648]: listener.cb(fileno) [ 565.737092] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.737092] env[61648]: result = function(*args, **kwargs) [ 565.737092] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.737092] env[61648]: return func(*args, **kwargs) [ 565.737092] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.737092] env[61648]: raise e [ 565.737092] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.737092] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 565.737092] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.737092] env[61648]: created_port_ids = self._update_ports_for_instance( [ 565.737092] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.737092] env[61648]: with excutils.save_and_reraise_exception(): [ 565.737092] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.737092] env[61648]: self.force_reraise() [ 565.737092] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.737092] env[61648]: raise self.value [ 565.737092] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.737092] env[61648]: updated_port = self._update_port( [ 565.737092] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.737092] env[61648]: _ensure_no_port_binding_failure(port) [ 565.737092] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.737092] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 565.738075] env[61648]: nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. [ 565.738075] env[61648]: Removing descriptor: 14 [ 565.738075] env[61648]: ERROR nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. 
[ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Traceback (most recent call last): [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] yield resources [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.driver.spawn(context, instance, image_meta, [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.738075] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] vm_ref = self.build_virtual_machine(instance, [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] for vif in network_info: [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self._sync_wrapper(fn, *args, **kwargs) [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.wait() [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self[:] = self._gt.wait() [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self._exit_event.wait() [ 565.738471] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.740179] env[61648]: ERROR 
nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] result = hub.switch() [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self.greenlet.switch() [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] result = function(*args, **kwargs) [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return func(*args, **kwargs) [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise e [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] nwinfo = self.network_api.allocate_for_instance( [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.740179] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] created_port_ids = self._update_ports_for_instance( [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] with excutils.save_and_reraise_exception(): [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.force_reraise() [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise self.value [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] updated_port = self._update_port( [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.741563] 
env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] _ensure_no_port_binding_failure(port) [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.741563] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise exception.PortBindingFailed(port_id=port['id']) [ 565.742602] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. [ 565.742602] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] [ 565.742602] env[61648]: INFO nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Terminating instance [ 565.742602] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.742602] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.742602] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.937267] env[61648]: INFO nova.compute.manager [-] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Took 1.04 seconds to deallocate network for instance. 
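The PortBindingFailed traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises once Neutron reports the port binding as failed. A self-contained sketch of that check, assuming the failure is signalled through the port's binding:vif_type attribute; the exception class below is a stand-in for nova.exception.PortBindingFailed, not Nova's own definition:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding on the port itself; turning that into
        # PortBindingFailed is what aborts the spawn in the traceback above and
        # triggers the claim abort / network deallocation that follows.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example using the port id from the entries above.
    try:
        ensure_no_port_binding_failure(
            {'id': 'ac3d7b2e-1aed-46e9-9873-bf19232cff48',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)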
[ 565.947955] env[61648]: DEBUG nova.compute.claims [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 565.947955] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.993837] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe37550-83c1-49cf-99ec-5a44beaa5dd2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.002692] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f69338-c35b-47bc-9c6d-de19fb7e7433 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.052863] env[61648]: DEBUG oslo_concurrency.lockutils [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] Releasing lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.052863] env[61648]: DEBUG nova.compute.manager [req-ce4830f1-7d1d-4982-8aae-180e78fdaa8f req-5873579c-fc97-4c95-98e6-ab7cfa8a3d0c service nova] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Received event network-vif-deleted-c460a3c2-72e9-4b13-a346-e7d0ac4a12ed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.053038] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba5e1fc-4dea-4465-bc9f-5975d2536ace {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.067960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5970f40c-b904-458a-8059-1632bf17fb61 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.083747] env[61648]: DEBUG nova.compute.provider_tree [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.224455] env[61648]: DEBUG nova.network.neutron [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.285108] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 
tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.477738] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.592164] env[61648]: DEBUG nova.scheduler.client.report [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.731403] env[61648]: INFO nova.compute.manager [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d6bbb34d-86a7-4686-ba60-0e418623e9fb] Took 1.04 seconds to deallocate network for instance. [ 566.929671] env[61648]: DEBUG nova.compute.manager [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Received event network-changed-ac3d7b2e-1aed-46e9-9873-bf19232cff48 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 566.929888] env[61648]: DEBUG nova.compute.manager [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Refreshing instance network info cache due to event network-changed-ac3d7b2e-1aed-46e9-9873-bf19232cff48. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 566.930377] env[61648]: DEBUG oslo_concurrency.lockutils [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] Acquiring lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.981036] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.981497] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 566.981927] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 566.982331] env[61648]: DEBUG oslo_concurrency.lockutils [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] Acquired lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.982510] env[61648]: DEBUG nova.network.neutron [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Refreshing network info cache for port ac3d7b2e-1aed-46e9-9873-bf19232cff48 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 566.983613] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c39ef459-ed5d-41d3-9ef3-fa83473691ef {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.995297] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848559ab-7c00-4951-aac7-a2bed7c4d578 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.027495] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d186a201-4ef8-40a6-9625-5e8ae99af4d1 could not be found. [ 567.027877] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 567.027877] env[61648]: INFO nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 567.028158] env[61648]: DEBUG oslo.service.loopingcall [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.028355] env[61648]: DEBUG nova.compute.manager [-] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.028442] env[61648]: DEBUG nova.network.neutron [-] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.059197] env[61648]: DEBUG nova.network.neutron [-] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.097148] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.099165] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 567.107238] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.861s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.112021] env[61648]: INFO nova.compute.claims [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.520861] env[61648]: DEBUG nova.network.neutron [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.563996] env[61648]: DEBUG nova.network.neutron [-] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.609471] env[61648]: DEBUG nova.compute.utils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.610956] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 567.614347] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 567.754808] env[61648]: DEBUG nova.network.neutron [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.779293] env[61648]: DEBUG nova.policy [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72c06e7cf9e94e929595e98899dd2e4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04cbdf8db91f43a29c2da6b61d014f03', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 567.780496] env[61648]: INFO nova.scheduler.client.report [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Deleted allocations for instance d6bbb34d-86a7-4686-ba60-0e418623e9fb [ 568.068879] env[61648]: INFO nova.compute.manager [-] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Took 1.04 seconds to deallocate network for instance. [ 568.072670] env[61648]: DEBUG nova.compute.claims [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 568.072670] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.119932] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 568.259393] env[61648]: DEBUG oslo_concurrency.lockutils [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] Releasing lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.259877] env[61648]: DEBUG nova.compute.manager [req-70f72d6e-fffe-4c0c-af07-27f5a4b9977c req-7b3116af-e3b8-45cb-9557-0026e8ddd39b service nova] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Received event network-vif-deleted-ac3d7b2e-1aed-46e9-9873-bf19232cff48 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.301291] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2c5cc8c6-f748-43d0-a3b0-d1c3f7712bfa tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "d6bbb34d-86a7-4686-ba60-0e418623e9fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.876s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.674802] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8534009-803c-4a76-890d-a399f5f22e88 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.684950] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ffc318-69b9-4c34-837d-d5f6ae0fb548 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.733220] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132417a4-186b-40aa-8175-15af3f1f0ed8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.746802] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6c0afc-1f15-4920-ab48-21cb9ab8deb4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.770208] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.808184] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 569.041791] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Successfully created port: b57c77eb-fa48-4bba-a182-702c61029b4e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.140028] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 569.181033] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.181033] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.181033] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.181264] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.181342] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.181833] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.181833] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.181833] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.182135] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.182462] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.182462] env[61648]: DEBUG nova.virt.hardware [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.183338] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c931a973-4c60-4e5c-a9b4-18d1e858202d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.193113] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709ab11d-840f-43a4-b56e-e2c31243439a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.246529] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.246760] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.276852] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] 
Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.338657] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.782042] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.782657] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 569.792891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.145s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.296685] env[61648]: DEBUG nova.compute.utils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.301847] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 570.306138] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 570.646765] env[61648]: DEBUG nova.policy [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab719482a69c4ba2b0725bb68a05930c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e97d3c0049d747fe80907ef09f3ed754', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 570.758570] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "458302d3-123c-47e8-bee8-6fe1462d5f4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.759086] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "458302d3-123c-47e8-bee8-6fe1462d5f4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.802025] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 570.813419] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0276411-d4c9-402b-87f2-5cfb448042f7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.828641] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9472759a-1e68-411d-8b4b-3b23e5cb3c7b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.881646] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e3aa0e-3545-4949-8fd2-fc67fab669b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.890195] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594f85dd-1cb0-4888-8b07-b92c7a107346 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.906041] env[61648]: DEBUG nova.compute.provider_tree [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.413307] env[61648]: DEBUG nova.scheduler.client.report [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.668202] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Successfully created port: 00364354-7ff7-48d7-ba06-aaa2777ab71c {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.815252] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 571.848333] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.852021] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.852468] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.852468] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] 
Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.852468] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.852468] env[61648]: DEBUG nova.virt.hardware [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.852468] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94aa25fa-87d1-476d-9f45-562062fd77f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.863578] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84d40da-33f1-408e-92d7-bf9a5b669fe5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.921027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.129s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.921027] env[61648]: ERROR nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. 
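A short aside on the hardware DEBUG lines a few entries above: for the m1.nano flavor Nova enumerates every (sockets, cores, threads) split of the vCPU count that fits under the 65536/65536/65536 limits, which for 1 vCPU leaves exactly one topology, VirtCPUTopology(cores=1,sockets=1,threads=1). A minimal sketch of that enumeration, with simplified inputs (this is not Nova's `_get_possible_cpu_topologies`, just the idea behind it):

```python
# Sketch only: enumerate (sockets, cores, threads) triples whose product equals
# the vCPU count, bounded by per-dimension maxima like the 65536 limits logged.
from typing import List, NamedTuple


class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[VirtCPUTopology]:
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found


# Matches the "Got 1 possible topologies" line for the 1-vCPU m1.nano flavor.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```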
[ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Traceback (most recent call last): [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.driver.spawn(context, instance, image_meta, [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.921027] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] vm_ref = self.build_virtual_machine(instance, [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] for vif in network_info: [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self._sync_wrapper(fn, *args, **kwargs) [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.wait() [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self[:] = self._gt.wait() [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self._exit_event.wait() [ 571.921450] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] result = hub.switch() [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return self.greenlet.switch() [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] result = function(*args, **kwargs) [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] return func(*args, **kwargs) [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise e [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] nwinfo = self.network_api.allocate_for_instance( [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 571.921844] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] created_port_ids = self._update_ports_for_instance( [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] with excutils.save_and_reraise_exception(): [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] self.force_reraise() [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise self.value [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] updated_port = self._update_port( [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] _ensure_no_port_binding_failure(port) [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 571.922246] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] raise exception.PortBindingFailed(port_id=port['id']) [ 571.922607] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] nova.exception.PortBindingFailed: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. [ 571.922607] env[61648]: ERROR nova.compute.manager [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] [ 571.922607] env[61648]: DEBUG nova.compute.utils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 571.922607] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.124s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.925609] env[61648]: INFO nova.compute.claims [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.928974] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Build of instance 7cee9c25-69f4-4ceb-ba48-0cc246657fdf was re-scheduled: Binding failed for port 1531534c-0121-491e-8821-21995c088b37, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 571.929611] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 571.930304] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.930590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.930871] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.264016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "46672a70-7d6b-4a86-833b-a7583c71e595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.264263] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "46672a70-7d6b-4a86-833b-a7583c71e595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.483032] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.709029] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.867144] env[61648]: DEBUG nova.compute.manager [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Received event network-changed-b57c77eb-fa48-4bba-a182-702c61029b4e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.867417] env[61648]: DEBUG nova.compute.manager [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Refreshing instance network info cache due to event network-changed-b57c77eb-fa48-4bba-a182-702c61029b4e. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 572.867562] env[61648]: DEBUG oslo_concurrency.lockutils [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] Acquiring lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.867697] env[61648]: DEBUG oslo_concurrency.lockutils [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] Acquired lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.867853] env[61648]: DEBUG nova.network.neutron [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Refreshing network info cache for port b57c77eb-fa48-4bba-a182-702c61029b4e {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.211348] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-7cee9c25-69f4-4ceb-ba48-0cc246657fdf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.211561] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 573.211733] env[61648]: DEBUG nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.211890] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 573.246517] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.334091] env[61648]: ERROR nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 573.334091] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.334091] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.334091] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.334091] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.334091] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.334091] env[61648]: ERROR nova.compute.manager raise self.value [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.334091] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 573.334091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.334091] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 573.335591] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.335591] env[61648]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 573.335591] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 573.335591] env[61648]: ERROR nova.compute.manager [ 573.335591] env[61648]: Traceback (most recent call last): [ 573.335591] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 573.335591] env[61648]: listener.cb(fileno) [ 573.335591] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.335591] env[61648]: result = function(*args, **kwargs) [ 573.335591] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 573.335591] env[61648]: return func(*args, **kwargs) [ 573.335591] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 573.335591] env[61648]: raise e [ 573.335591] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.335591] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 573.335591] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.335591] env[61648]: created_port_ids = self._update_ports_for_instance( [ 573.335591] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.335591] env[61648]: with excutils.save_and_reraise_exception(): [ 573.335591] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.335591] env[61648]: self.force_reraise() [ 573.335591] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.335591] env[61648]: raise self.value [ 573.335591] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.335591] env[61648]: updated_port = self._update_port( [ 573.335591] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.335591] env[61648]: _ensure_no_port_binding_failure(port) [ 573.335591] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.335591] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 573.336624] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 573.336624] env[61648]: Removing descriptor: 19 [ 573.336624] env[61648]: ERROR nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. 
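Each of these tracebacks bottoms out in `_ensure_no_port_binding_failure` in nova/network/neutron.py, which turns a Neutron port whose binding failed into the `PortBindingFailed` seen here. A self-contained sketch of that guard, using a local stand-in exception rather than nova.exception itself (the `binding:vif_type == 'binding_failed'` value is how Neutron reports a failed binding):

```python
# Stand-alone illustration of the check that produced the errors above; the
# exception class is a local stand-in, not nova.exception.PortBindingFailed.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id: str):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reports that binding this port to a host failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


# One of the ports from the log, in the failed-binding state:
failed_port = {'id': 'b57c77eb-fa48-4bba-a182-702c61029b4e',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)
```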
[ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Traceback (most recent call last): [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] yield resources [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.driver.spawn(context, instance, image_meta, [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.336624] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] vm_ref = self.build_virtual_machine(instance, [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] for vif in network_info: [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self._sync_wrapper(fn, *args, **kwargs) [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.wait() [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self[:] = self._gt.wait() [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self._exit_event.wait() [ 573.337033] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.337518] env[61648]: ERROR 
nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] result = hub.switch() [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self.greenlet.switch() [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] result = function(*args, **kwargs) [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return func(*args, **kwargs) [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise e [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] nwinfo = self.network_api.allocate_for_instance( [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 573.337518] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] created_port_ids = self._update_ports_for_instance( [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] with excutils.save_and_reraise_exception(): [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.force_reraise() [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise self.value [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] updated_port = self._update_port( [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.337939] 
env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] _ensure_no_port_binding_failure(port) [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.337939] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise exception.PortBindingFailed(port_id=port['id']) [ 573.339294] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 573.339294] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] [ 573.339294] env[61648]: INFO nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Terminating instance [ 573.339414] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquiring lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.430881] env[61648]: DEBUG nova.network.neutron [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.493116] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976ca0ab-5d9b-476b-bcbd-52a7d882a092 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.504537] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e814d792-3124-48df-b3be-39e0736935ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.545024] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6f17a6-1fc7-4d86-99e1-c93c9dbdcae9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.552025] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883207d6-442c-475a-9fe3-1785f18db93c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.565725] env[61648]: DEBUG nova.compute.provider_tree [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.701020] env[61648]: DEBUG nova.network.neutron [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Updating instance_info_cache with network_info: [] {{(pid=61648) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.751735] env[61648]: DEBUG nova.network.neutron [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.072706] env[61648]: DEBUG nova.scheduler.client.report [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 574.206543] env[61648]: DEBUG oslo_concurrency.lockutils [req-66bccc29-61d1-446b-a3e5-e066acf25241 req-c63ea4b7-5e7f-4981-ab32-2a66b0fa6018 service nova] Releasing lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.206912] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquired lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.208261] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.254069] env[61648]: INFO nova.compute.manager [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: 7cee9c25-69f4-4ceb-ba48-0cc246657fdf] Took 1.04 seconds to deallocate network for instance. [ 574.578352] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.579826] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 574.582888] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.687s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.755970] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.850209] env[61648]: ERROR nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. [ 574.850209] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.850209] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.850209] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.850209] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.850209] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.850209] env[61648]: ERROR nova.compute.manager raise self.value [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.850209] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 574.850209] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.850209] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 574.850919] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.850919] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 574.850919] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. 
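The recurring "Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0" entries report total/reserved/allocation_ratio per resource class. Placement derives usable capacity as (total - reserved) * allocation_ratio; applied to the figures in this log that gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. A tiny sketch of that arithmetic (the helper name is illustrative, not a placement API):

```python
# Capacity implied by the inventory logged for the provider above, using the
# usual placement formula capacity = (total - reserved) * allocation_ratio.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def capacity(inv: dict) -> float:
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']


for resource_class, inv in INVENTORY.items():
    print(f"{resource_class}: {capacity(inv):g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```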
[ 574.850919] env[61648]: ERROR nova.compute.manager [ 574.850919] env[61648]: Traceback (most recent call last): [ 574.850919] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 574.850919] env[61648]: listener.cb(fileno) [ 574.850919] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.850919] env[61648]: result = function(*args, **kwargs) [ 574.850919] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 574.850919] env[61648]: return func(*args, **kwargs) [ 574.850919] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.850919] env[61648]: raise e [ 574.850919] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.850919] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 574.850919] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.850919] env[61648]: created_port_ids = self._update_ports_for_instance( [ 574.850919] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.850919] env[61648]: with excutils.save_and_reraise_exception(): [ 574.850919] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.850919] env[61648]: self.force_reraise() [ 574.850919] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.850919] env[61648]: raise self.value [ 574.850919] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.850919] env[61648]: updated_port = self._update_port( [ 574.850919] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.850919] env[61648]: _ensure_no_port_binding_failure(port) [ 574.850919] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.850919] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 574.851875] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. [ 574.851875] env[61648]: Removing descriptor: 14 [ 574.851875] env[61648]: ERROR nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. 
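Since every failure says "please check neutron logs for more information", the quickest cross-check from the API side is the port's own binding state. A hedged operator-side sketch using openstacksdk (assumes the SDK is installed and a cloud entry named 'devstack-admin' exists in clouds.yaml; verify the attribute names against your SDK version):

```python
# Diagnostic sketch, not part of Nova: inspect one of the failed ports named in
# the log. Assumes openstacksdk and a 'devstack-admin' entry in clouds.yaml.
import openstack

PORT_ID = '00364354-7ff7-48d7-ba06-aaa2777ab71c'  # port from the traceback above

conn = openstack.connect(cloud='devstack-admin')
port = conn.network.get_port(PORT_ID)

# A vif_type of 'binding_failed' is what makes Nova raise PortBindingFailed.
print('status          :', port.status)
print('binding host    :', port.binding_host_id)
print('binding vif_type:', port.binding_vif_type)
```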
[ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Traceback (most recent call last): [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] yield resources [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.driver.spawn(context, instance, image_meta, [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.851875] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] vm_ref = self.build_virtual_machine(instance, [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] for vif in network_info: [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self._sync_wrapper(fn, *args, **kwargs) [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.wait() [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self[:] = self._gt.wait() [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self._exit_event.wait() [ 574.852286] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.852716] env[61648]: ERROR 
nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] result = hub.switch() [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self.greenlet.switch() [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] result = function(*args, **kwargs) [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return func(*args, **kwargs) [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise e [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] nwinfo = self.network_api.allocate_for_instance( [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.852716] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] created_port_ids = self._update_ports_for_instance( [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] with excutils.save_and_reraise_exception(): [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.force_reraise() [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise self.value [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] updated_port = self._update_port( [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.853132] 
env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] _ensure_no_port_binding_failure(port) [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.853132] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise exception.PortBindingFailed(port_id=port['id']) [ 574.854430] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. [ 574.854430] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] [ 574.854430] env[61648]: INFO nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Terminating instance [ 574.854430] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.854430] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.854709] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.895376] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.092514] env[61648]: DEBUG nova.compute.utils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.093731] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 575.094051] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 575.179883] env[61648]: DEBUG nova.policy [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 575.243480] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "a9d3592b-56f7-4823-bf0c-8b92ac4587bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.243737] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "a9d3592b-56f7-4823-bf0c-8b92ac4587bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.270811] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "b07648a0-23a5-4dee-9582-ce393292b768" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.271176] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "b07648a0-23a5-4dee-9582-ce393292b768" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.293357] env[61648]: INFO nova.scheduler.client.report [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Deleted allocations for instance 7cee9c25-69f4-4ceb-ba48-0cc246657fdf [ 575.306508] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 
tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "e9d5a8b8-afc2-40dc-b480-0b946e085e18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.306677] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "e9d5a8b8-afc2-40dc-b480-0b946e085e18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.386861] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.399795] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Releasing lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.400246] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 575.400426] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 575.401150] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cdebfdc-427a-4a05-babf-9e87ee729128 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.413747] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b823d570-792c-49bb-8d29-21ed5dc1cfcf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.442117] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2dadf90-2469-4df6-bcc4-dd65d8a748bc could not be found. 
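The WARNING just above shows the vmwareapi destroy path treating a VM that is already gone on the backend as successfully destroyed rather than failing the teardown. A minimal sketch of that tolerance, assuming the backend lookup raises InstanceNotFound when the VM reference no longer exists (names here are illustrative, not the exact vmops code):

    # Hypothetical sketch: tolerate a VM that is already gone during destroy.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, lookup_vm_ref, destroy_vm):
        try:
            vm_ref = lookup_vm_ref(instance_uuid)
        except InstanceNotFound:
            # Matches the "Instance does not exist on backend" warning above:
            # there is nothing to tear down on the hypervisor, so report success.
            LOG.warning("Instance does not exist on backend", exc_info=True)
            return
        destroy_vm(vm_ref)
        LOG.debug("Instance destroyed")

This is why the log continues with "Instance destroyed" and "Took 0.04 seconds to destroy the instance on the hypervisor" even though the backend VM was never found.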
[ 575.442773] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 575.442773] env[61648]: INFO nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 575.442773] env[61648]: DEBUG oslo.service.loopingcall [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.448071] env[61648]: DEBUG nova.compute.manager [-] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 575.448071] env[61648]: DEBUG nova.network.neutron [-] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 575.471892] env[61648]: DEBUG nova.network.neutron [-] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.567766] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquiring lock "a1a8b990-f4b7-4049-9345-562d1b5c180e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.568074] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock "a1a8b990-f4b7-4049-9345-562d1b5c180e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.581922] env[61648]: DEBUG nova.compute.manager [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Received event network-vif-deleted-b57c77eb-fa48-4bba-a182-702c61029b4e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.583769] env[61648]: DEBUG nova.compute.manager [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Received event network-changed-00364354-7ff7-48d7-ba06-aaa2777ab71c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 575.585218] env[61648]: DEBUG nova.compute.manager [req-d61db3cd-6810-41f6-91df-a70b557da7ca 
req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Refreshing instance network info cache due to event network-changed-00364354-7ff7-48d7-ba06-aaa2777ab71c. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 575.588937] env[61648]: DEBUG oslo_concurrency.lockutils [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] Acquiring lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.591092] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.596909] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 575.688381] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquiring lock "a978aa73-3f2a-4a87-bda3-bcde3028a646" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.688695] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "a978aa73-3f2a-4a87-bda3-bcde3028a646" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.701241] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6807c99d-ac98-4cc2-9a0a-98f4db8b0984 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.716135] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9ebac2-1bbe-434c-92fb-dcf204e43ab3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.749595] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Successfully created port: 473639d2-609a-43b0-9eea-0296702d5358 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 575.751420] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d762774-1e41-4aba-95c0-25447bfec0ab {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.758783] 
env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166a9b9d-5355-4375-b1fc-be2ac2fe45e7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.773221] env[61648]: DEBUG nova.compute.provider_tree [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.811492] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8926275f-e4dc-4ec3-93c3-0132c0d3d001 tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "7cee9c25-69f4-4ceb-ba48-0cc246657fdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.223s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.975355] env[61648]: DEBUG nova.network.neutron [-] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.094628] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.095155] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 576.095351] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 576.095664] env[61648]: DEBUG oslo_concurrency.lockutils [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] Acquired lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.095887] env[61648]: DEBUG nova.network.neutron [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Refreshing network info cache for port 00364354-7ff7-48d7-ba06-aaa2777ab71c {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.100038] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f66ca2ca-3835-4942-abc6-67e85592450d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.113942] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263e8e9d-bd9c-4a37-92ec-111ae620a6b0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.142770] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66562ea6-5d39-4b98-a9e2-0512295ab94f could not be found. [ 576.142872] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 576.143014] env[61648]: INFO nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 576.143309] env[61648]: DEBUG oslo.service.loopingcall [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.143540] env[61648]: DEBUG nova.compute.manager [-] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 576.143627] env[61648]: DEBUG nova.network.neutron [-] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.172969] env[61648]: DEBUG nova.network.neutron [-] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.277234] env[61648]: DEBUG nova.scheduler.client.report [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.316346] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 576.479611] env[61648]: INFO nova.compute.manager [-] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Took 1.03 seconds to deallocate network for instance. [ 576.480603] env[61648]: DEBUG nova.compute.claims [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 576.480720] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.610910] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 576.627265] env[61648]: DEBUG nova.network.neutron [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.644559] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.644830] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.645085] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.648356] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 576.648755] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.648755] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.648966] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.649151] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.649327] env[61648]: DEBUG nova.virt.hardware [None 
req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.649491] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.649661] env[61648]: DEBUG nova.virt.hardware [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.650542] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19f4fef-3cdb-46d0-b417-74b902edd110 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.658521] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9e95b6-8565-4cda-a61d-d7a4f2a45065 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.676171] env[61648]: DEBUG nova.network.neutron [-] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.784505] env[61648]: DEBUG nova.network.neutron [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.785825] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.203s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.790076] env[61648]: ERROR nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. 
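The build failure above (and the traceback that follows) bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed from the port data Neutron returned. A minimal sketch of that guard, under the assumption that an unbindable port is signalled by its binding:vif_type attribute carrying the 'binding_failed' sentinel:

    # Sketch of the guard that produces the PortBindingFailed errors in this log.
    # Assumption: Neutron marks an unbindable port with
    # binding:vif_type == 'binding_failed'.
    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])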
[ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Traceback (most recent call last): [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.driver.spawn(context, instance, image_meta, [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] vm_ref = self.build_virtual_machine(instance, [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.790076] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] for vif in network_info: [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self._sync_wrapper(fn, *args, **kwargs) [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.wait() [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self[:] = self._gt.wait() [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self._exit_event.wait() [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] result = hub.switch() [ 576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
576.790448] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return self.greenlet.switch() [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] result = function(*args, **kwargs) [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] return func(*args, **kwargs) [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise e [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] nwinfo = self.network_api.allocate_for_instance( [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] created_port_ids = self._update_ports_for_instance( [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] with excutils.save_and_reraise_exception(): [ 576.790941] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] self.force_reraise() [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise self.value [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] updated_port = self._update_port( [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] _ensure_no_port_binding_failure(port) [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] raise exception.PortBindingFailed(port_id=port['id']) [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] nova.exception.PortBindingFailed: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. [ 576.791348] env[61648]: ERROR nova.compute.manager [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] [ 576.794345] env[61648]: DEBUG nova.compute.utils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.796018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.477s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.801346] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Build of instance f03f349e-d5ed-437b-8b13-6b036f2b88dc was re-scheduled: Binding failed for port 5b10ac06-87b4-4ff9-b72f-093167b1d8cb, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 576.801882] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 576.802186] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.802407] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquired lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.802626] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.844313] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.178746] env[61648]: INFO nova.compute.manager [-] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Took 1.03 seconds to deallocate network for instance. [ 577.181414] env[61648]: DEBUG nova.compute.claims [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 577.181630] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.258782] env[61648]: ERROR nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. 
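The "Instance failed network setup after 1 attempt(s)" entry above comes from a bounded retry around the Neutron allocation call; its full traceback follows below. A simplified sketch of that shape, assuming a configurable attempt count (the real option name and control flow in nova.compute.manager may differ):

    # Hypothetical sketch of bounded retries around port allocation.
    import logging
    import time

    LOG = logging.getLogger(__name__)

    def allocate_network_with_retries(allocate_fn, attempts=1, delay=1.0):
        for attempt in range(1, attempts + 1):
            try:
                return allocate_fn()
            except Exception:
                if attempt == attempts:
                    LOG.exception("Instance failed network setup after "
                                  "%d attempt(s)", attempt)
                    raise
                LOG.debug("Retrying network setup (attempt %d)", attempt)
                time.sleep(delay)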
[ 577.258782] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.258782] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.258782] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.258782] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.258782] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.258782] env[61648]: ERROR nova.compute.manager raise self.value [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.258782] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 577.258782] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.258782] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 577.259379] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.259379] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 577.259379] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. 
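The excutils frames in the traceback above (save_and_reraise_exception, force_reraise, raise self.value) are oslo.utils' standard pattern for running cleanup while preserving the original exception. A short sketch of how _update_ports_for_instance-style code typically uses it; the rollback callback here is a placeholder, only the context manager itself is the real API:

    # Sketch of the oslo.utils save_and_reraise_exception pattern visible in
    # the traceback frames above.
    from oslo_utils import excutils

    def update_ports(update_port, ports, rollback_created_ports):
        updated = []
        for port in ports:
            try:
                updated.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Undo partial work; the original exception is re-raised
                    # automatically when the with-block exits.
                    rollback_created_ports(updated)
        return updated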
[ 577.259379] env[61648]: ERROR nova.compute.manager [ 577.259379] env[61648]: Traceback (most recent call last): [ 577.259379] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 577.259379] env[61648]: listener.cb(fileno) [ 577.259379] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.259379] env[61648]: result = function(*args, **kwargs) [ 577.259379] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.259379] env[61648]: return func(*args, **kwargs) [ 577.259379] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.259379] env[61648]: raise e [ 577.259379] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.259379] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 577.259379] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.259379] env[61648]: created_port_ids = self._update_ports_for_instance( [ 577.259379] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.259379] env[61648]: with excutils.save_and_reraise_exception(): [ 577.259379] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.259379] env[61648]: self.force_reraise() [ 577.259379] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.259379] env[61648]: raise self.value [ 577.259379] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.259379] env[61648]: updated_port = self._update_port( [ 577.259379] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.259379] env[61648]: _ensure_no_port_binding_failure(port) [ 577.259379] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.259379] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 577.260271] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. [ 577.260271] env[61648]: Removing descriptor: 19 [ 577.260271] env[61648]: ERROR nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. 
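The "Instance failed to spawn" entry above, and the traceback that follows, show the spawn path only hitting PortBindingFailed when it first iterates network_info: allocation runs in a background greenthread and model.py's _sync_wrapper waits on it at that point. A toy illustration of that deferred-result shape, assuming eventlet greenthreads; this is not the actual NetworkInfoAsyncWrapper implementation:

    # Toy stand-in for the deferred network_info behaviour seen in the
    # tracebacks: the allocation failure only surfaces when the result is
    # first consumed.
    import eventlet

    class AsyncNetworkInfo:
        def __init__(self, allocate_fn, *args, **kwargs):
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)
            self._nw_info = None

        def _wait(self):
            if self._nw_info is None:
                # Re-raises any exception from the allocation greenthread,
                # e.g. PortBindingFailed, at the point of first use.
                self._nw_info = self._gt.wait()
            return self._nw_info

        def __iter__(self):
            return iter(self._wait())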
[ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Traceback (most recent call last): [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] yield resources [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.driver.spawn(context, instance, image_meta, [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.260271] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] vm_ref = self.build_virtual_machine(instance, [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] for vif in network_info: [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return self._sync_wrapper(fn, *args, **kwargs) [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.wait() [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self[:] = self._gt.wait() [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return self._exit_event.wait() [ 577.260663] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.261133] env[61648]: ERROR 
nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] result = hub.switch() [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return self.greenlet.switch() [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] result = function(*args, **kwargs) [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return func(*args, **kwargs) [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise e [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] nwinfo = self.network_api.allocate_for_instance( [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.261133] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] created_port_ids = self._update_ports_for_instance( [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] with excutils.save_and_reraise_exception(): [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.force_reraise() [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise self.value [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] updated_port = self._update_port( [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.261624] 
env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] _ensure_no_port_binding_failure(port) [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.261624] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise exception.PortBindingFailed(port_id=port['id']) [ 577.263087] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. [ 577.263087] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] [ 577.263087] env[61648]: INFO nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Terminating instance [ 577.263087] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.263087] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.263392] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 577.297349] env[61648]: DEBUG oslo_concurrency.lockutils [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] Releasing lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.297577] env[61648]: DEBUG nova.compute.manager [req-d61db3cd-6810-41f6-91df-a70b557da7ca req-49de1361-dbdb-4d0c-bccf-a1963b4a85c6 service nova] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Received event network-vif-deleted-00364354-7ff7-48d7-ba06-aaa2777ab71c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 577.329731] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.434444] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.668901] env[61648]: DEBUG nova.compute.manager [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Received event network-changed-473639d2-609a-43b0-9eea-0296702d5358 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 577.669126] env[61648]: DEBUG nova.compute.manager [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Refreshing instance network info cache due to event network-changed-473639d2-609a-43b0-9eea-0296702d5358. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 577.669308] env[61648]: DEBUG oslo_concurrency.lockutils [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] Acquiring lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.758539] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ba958e-b875-4faf-932b-36c92295bac5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.765267] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0272c2cf-d31b-4e5c-b298-3f8df4739086 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.802586] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.806012] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611f08bf-91b7-49a2-a8bc-0ad86c29c42e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.814014] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096c6b6c-182a-48ce-9849-a885ce03196f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.828534] env[61648]: DEBUG nova.compute.provider_tree [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.884979] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.940361] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Releasing lock "refresh_cache-f03f349e-d5ed-437b-8b13-6b036f2b88dc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.941331] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 577.941331] env[61648]: DEBUG nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.941331] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 577.961484] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.335361] env[61648]: DEBUG nova.scheduler.client.report [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.390022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.390022] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 578.390022] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 578.390022] env[61648]: DEBUG oslo_concurrency.lockutils [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] Acquired lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.390022] env[61648]: DEBUG nova.network.neutron [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Refreshing network info cache for port 473639d2-609a-43b0-9eea-0296702d5358 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 578.390376] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2170f89d-7e99-4460-8636-3e2d794f6688 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.399546] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0dfc92-668f-452b-bf2d-e30c16077607 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.432455] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e51d3f4e-41d5-4190-a8e9-21c743aa3b5e could not 
be found. [ 578.434061] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 578.434061] env[61648]: INFO nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 578.434061] env[61648]: DEBUG oslo.service.loopingcall [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.434311] env[61648]: DEBUG nova.compute.manager [-] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.434311] env[61648]: DEBUG nova.network.neutron [-] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 578.450961] env[61648]: DEBUG nova.network.neutron [-] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.467162] env[61648]: DEBUG nova.network.neutron [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.841525] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.045s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.842235] env[61648]: ERROR nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. 
[ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Traceback (most recent call last): [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.driver.spawn(context, instance, image_meta, [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] vm_ref = self.build_virtual_machine(instance, [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] vif_infos = vmwarevif.get_vif_info(self._session, [ 578.842235] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] for vif in network_info: [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self._sync_wrapper(fn, *args, **kwargs) [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.wait() [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self[:] = self._gt.wait() [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self._exit_event.wait() [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] result = hub.switch() [ 578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
578.842638] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return self.greenlet.switch() [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] result = function(*args, **kwargs) [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] return func(*args, **kwargs) [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise e [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] nwinfo = self.network_api.allocate_for_instance( [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] created_port_ids = self._update_ports_for_instance( [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] with excutils.save_and_reraise_exception(): [ 578.843045] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] self.force_reraise() [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise self.value [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] updated_port = self._update_port( [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] _ensure_no_port_binding_failure(port) [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] raise exception.PortBindingFailed(port_id=port['id']) [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] nova.exception.PortBindingFailed: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. [ 578.843467] env[61648]: ERROR nova.compute.manager [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] [ 578.846030] env[61648]: DEBUG nova.compute.utils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 578.846278] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Build of instance 3c252464-1b1a-4c1a-86bb-2fb0107aa52f was re-scheduled: Binding failed for port 66a46ce9-a11c-41e9-a22e-9650b67e2740, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 578.846705] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 578.846926] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquiring lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.847080] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Acquired lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.849357] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.853022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.006s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
578.853138] env[61648]: INFO nova.compute.claims [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.908988] env[61648]: DEBUG nova.network.neutron [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.953668] env[61648]: DEBUG nova.network.neutron [-] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.970011] env[61648]: INFO nova.compute.manager [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: f03f349e-d5ed-437b-8b13-6b036f2b88dc] Took 1.03 seconds to deallocate network for instance. [ 579.032542] env[61648]: DEBUG nova.network.neutron [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.374736] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.457787] env[61648]: INFO nova.compute.manager [-] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Took 1.02 seconds to deallocate network for instance. 
[ 579.460010] env[61648]: DEBUG nova.compute.claims [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 579.460149] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.500270] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.537164] env[61648]: DEBUG oslo_concurrency.lockutils [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] Releasing lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.537448] env[61648]: DEBUG nova.compute.manager [req-538052d5-237a-4c10-9b2b-bfe52c259f47 req-242cfdea-cd9d-42b4-9625-aa1ad5932f61 service nova] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Received event network-vif-deleted-473639d2-609a-43b0-9eea-0296702d5358 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 580.001553] env[61648]: INFO nova.scheduler.client.report [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Deleted allocations for instance f03f349e-d5ed-437b-8b13-6b036f2b88dc [ 580.012257] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Releasing lock "refresh_cache-3c252464-1b1a-4c1a-86bb-2fb0107aa52f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.012506] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 580.012682] env[61648]: DEBUG nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 580.012853] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 580.040319] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.385883] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0163a471-df6c-47e2-905d-c9dccf674c42 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.395915] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de3a504-fec1-4db6-9489-2a6c99fcfd24 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.429876] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9ab378-f05c-4edb-90b1-fa4fd75119e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.437141] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1e8a9e-b484-41a9-89cf-4d56957ad3da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.456195] env[61648]: DEBUG nova.compute.provider_tree [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.523085] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928cc184-8281-4f84-b435-5a040e3f405d tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "f03f349e-d5ed-437b-8b13-6b036f2b88dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.006s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.543952] env[61648]: DEBUG nova.network.neutron [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Updating instance_info_cache with network_info: [] {{(pid=61648) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.964443] env[61648]: DEBUG nova.scheduler.client.report [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.031534] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 581.049369] env[61648]: INFO nova.compute.manager [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] [instance: 3c252464-1b1a-4c1a-86bb-2fb0107aa52f] Took 1.04 seconds to deallocate network for instance. [ 581.472189] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.473796] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 581.475466] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.528s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.553217] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.980022] env[61648]: DEBUG nova.compute.utils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 581.981344] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 581.981516] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.059329] env[61648]: DEBUG nova.policy [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30ad29380be74652b3dec8c76685d011', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13d79b9c03214bedb8f5e3fda4023db3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 582.082009] env[61648]: INFO nova.scheduler.client.report [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Deleted allocations for instance 3c252464-1b1a-4c1a-86bb-2fb0107aa52f [ 582.469225] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3b288a-ea21-4edf-ad61-fb2264b10d24 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.478543] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c36c18a-abcb-4553-a25e-671404232606 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.510960] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Successfully created port: c43cbf92-2c48-41ba-822c-c6a62a56fae1 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.513299] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 582.520141] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576547c6-2dfb-40bb-9d31-29c866c7367a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.524657] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c35648-3623-4f9a-8724-c1b364d40ddc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.539454] env[61648]: DEBUG nova.compute.provider_tree [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.595636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1993af3f-a073-4713-9055-06af79e05cbb tempest-ImagesOneServerNegativeTestJSON-1029240814 tempest-ImagesOneServerNegativeTestJSON-1029240814-project-member] Lock "3c252464-1b1a-4c1a-86bb-2fb0107aa52f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.782s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.042528] env[61648]: DEBUG nova.scheduler.client.report [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.097813] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 583.528057] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 583.553015] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.077s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.555815] env[61648]: ERROR nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Traceback (most recent call last): [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.driver.spawn(context, instance, image_meta, [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] vm_ref = self.build_virtual_machine(instance, [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.555815] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] for vif in network_info: [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self._sync_wrapper(fn, *args, **kwargs) [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: 
b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.wait() [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self[:] = self._gt.wait() [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self._exit_event.wait() [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] result = hub.switch() [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 583.556363] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return self.greenlet.switch() [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] result = function(*args, **kwargs) [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] return func(*args, **kwargs) [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise e [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] nwinfo = self.network_api.allocate_for_instance( [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] created_port_ids = self._update_ports_for_instance( [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] with excutils.save_and_reraise_exception(): [ 583.556770] env[61648]: ERROR nova.compute.manager [instance: 
b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] self.force_reraise() [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise self.value [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] updated_port = self._update_port( [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] _ensure_no_port_binding_failure(port) [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] raise exception.PortBindingFailed(port_id=port['id']) [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] nova.exception.PortBindingFailed: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. [ 583.557191] env[61648]: ERROR nova.compute.manager [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] [ 583.557954] env[61648]: DEBUG nova.compute.utils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 583.560045] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.488s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.565606] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Build of instance b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b was re-scheduled: Binding failed for port c460a3c2-72e9-4b13-a346-e7d0ac4a12ed, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 583.566119] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 583.566350] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquiring lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.566495] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Acquired lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.566650] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.573129] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.573129] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.573129] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.573323] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 
tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.573323] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.573323] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.573323] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.573323] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.574352] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.574582] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.574867] env[61648]: DEBUG nova.virt.hardware [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.575983] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18abea4-1f36-4cc5-aad4-18569e3cea7e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.591681] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe911283-e520-4629-b72c-ce80fe135a2e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.621179] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.634016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.813242] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.309461] env[61648]: DEBUG nova.compute.manager [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Received event network-changed-c43cbf92-2c48-41ba-822c-c6a62a56fae1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 584.309662] env[61648]: DEBUG nova.compute.manager [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Refreshing instance network info cache due to event network-changed-c43cbf92-2c48-41ba-822c-c6a62a56fae1. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 584.309869] env[61648]: DEBUG oslo_concurrency.lockutils [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] Acquiring lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.310019] env[61648]: DEBUG oslo_concurrency.lockutils [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] Acquired lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.312161] env[61648]: DEBUG nova.network.neutron [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Refreshing network info cache for port c43cbf92-2c48-41ba-822c-c6a62a56fae1 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 584.320108] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Releasing lock "refresh_cache-b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.320392] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 584.320622] env[61648]: DEBUG nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.320813] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 584.344949] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.501318] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417e58c2-5374-4953-b686-d7f414cb32f6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.509187] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bd3934-384d-42cb-bff3-b6388495b85e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.546019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a08313-5109-4f95-baf0-cc509e4c5f92 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.554693] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b382cb2a-8371-47cf-9927-d74d03a4aa81 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.569439] env[61648]: DEBUG nova.compute.provider_tree [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.600434] env[61648]: ERROR nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. 
[ 584.600434] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.600434] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.600434] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.600434] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.600434] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.600434] env[61648]: ERROR nova.compute.manager raise self.value [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.600434] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 584.600434] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.600434] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 584.601132] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.601132] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 584.601132] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. 
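The traceback above bottoms out in _ensure_no_port_binding_failure() in nova/network/neutron.py. For reference, a minimal sketch of that check follows; it is an approximation for readability, not the verbatim Nova source, and the exception class and constant below are simplified stand-ins. The idea is that Nova inspects the binding:vif_type Neutron returned for the updated port and raises when no mechanism driver could bind it:

# Minimal sketch of the check that produced the PortBindingFailed above
# (approximation of the helper in nova/network/neutron.py, not the real source).
VIF_TYPE_BINDING_FAILED = 'binding_failed'   # value Neutron reports when binding fails

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def _ensure_no_port_binding_failure(port):
    # 'port' is the dict returned by Neutron's port-update call.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# The port in the log above came back with binding:vif_type='binding_failed',
# so _update_port() re-raised and _allocate_network_async aborted the spawn:
port = {'id': 'c43cbf92-2c48-41ba-822c-c6a62a56fae1',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)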
[ 584.601132] env[61648]: ERROR nova.compute.manager [ 584.601132] env[61648]: Traceback (most recent call last): [ 584.601132] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 584.601132] env[61648]: listener.cb(fileno) [ 584.601132] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.601132] env[61648]: result = function(*args, **kwargs) [ 584.601132] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.601132] env[61648]: return func(*args, **kwargs) [ 584.601132] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.601132] env[61648]: raise e [ 584.601132] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.601132] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 584.601132] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.601132] env[61648]: created_port_ids = self._update_ports_for_instance( [ 584.601132] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.601132] env[61648]: with excutils.save_and_reraise_exception(): [ 584.601132] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.601132] env[61648]: self.force_reraise() [ 584.601132] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.601132] env[61648]: raise self.value [ 584.601132] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.601132] env[61648]: updated_port = self._update_port( [ 584.601132] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.601132] env[61648]: _ensure_no_port_binding_failure(port) [ 584.601132] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.601132] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 584.602571] env[61648]: nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. [ 584.602571] env[61648]: Removing descriptor: 19 [ 584.602571] env[61648]: ERROR nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. 
[ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Traceback (most recent call last): [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] yield resources [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.driver.spawn(context, instance, image_meta, [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.602571] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] vm_ref = self.build_virtual_machine(instance, [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] for vif in network_info: [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self._sync_wrapper(fn, *args, **kwargs) [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.wait() [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self[:] = self._gt.wait() [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self._exit_event.wait() [ 584.603591] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.603872] env[61648]: ERROR 
nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] result = hub.switch() [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self.greenlet.switch() [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] result = function(*args, **kwargs) [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return func(*args, **kwargs) [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise e [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] nwinfo = self.network_api.allocate_for_instance( [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.603872] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] created_port_ids = self._update_ports_for_instance( [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] with excutils.save_and_reraise_exception(): [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.force_reraise() [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise self.value [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] updated_port = self._update_port( [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.604215] 
env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] _ensure_no_port_binding_failure(port) [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.604215] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise exception.PortBindingFailed(port_id=port['id']) [ 584.604479] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. [ 584.604479] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] [ 584.604479] env[61648]: INFO nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Terminating instance [ 584.604479] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquiring lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.651256] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "0a321a24-0f87-47e7-8364-5da5f6a65131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.651498] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "0a321a24-0f87-47e7-8364-5da5f6a65131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.840472] env[61648]: DEBUG nova.network.neutron [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.848673] env[61648]: DEBUG nova.network.neutron [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.983374] env[61648]: DEBUG nova.network.neutron [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.074650] env[61648]: DEBUG nova.scheduler.client.report [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 585.351492] env[61648]: INFO nova.compute.manager [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] [instance: b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b] Took 1.03 seconds to deallocate network for instance. 
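The "Inventory has not changed" report above restates the provider's inventory records. To make those numbers concrete, here is a short worked example under the assumption that consumable capacity follows the usual Placement formula (total - reserved) * allocation_ratio; min_unit, max_unit and step_size only constrain individual allocations and are omitted from the sketch:

# Worked example: effective capacity implied by the inventory data logged above.
# Assumes capacity = (total - reserved) * allocation_ratio (standard Placement math).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} consumable units")

# Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- the headroom the
# ResourceTracker is claiming against in the "compute_resources" lock sections.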
[ 585.485744] env[61648]: DEBUG oslo_concurrency.lockutils [req-1f4c5853-9056-4ff4-9ae5-4a01cf80c1ac req-17521e0b-aac8-4cf6-8e7c-746e2f8229b0 service nova] Releasing lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.486214] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquired lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.486401] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.585651] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.026s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.588457] env[61648]: ERROR nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. 
[ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Traceback (most recent call last): [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.driver.spawn(context, instance, image_meta, [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] vm_ref = self.build_virtual_machine(instance, [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.588457] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] for vif in network_info: [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self._sync_wrapper(fn, *args, **kwargs) [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.wait() [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self[:] = self._gt.wait() [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self._exit_event.wait() [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] result = hub.switch() [ 585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
585.588848] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return self.greenlet.switch() [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] result = function(*args, **kwargs) [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] return func(*args, **kwargs) [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise e [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] nwinfo = self.network_api.allocate_for_instance( [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] created_port_ids = self._update_ports_for_instance( [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] with excutils.save_and_reraise_exception(): [ 585.589120] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] self.force_reraise() [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise self.value [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] updated_port = self._update_port( [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] _ensure_no_port_binding_failure(port) [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] raise exception.PortBindingFailed(port_id=port['id']) [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] nova.exception.PortBindingFailed: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. [ 585.589380] env[61648]: ERROR nova.compute.manager [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] [ 585.589598] env[61648]: DEBUG nova.compute.utils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 585.589598] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.251s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.590977] env[61648]: INFO nova.compute.claims [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.594686] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Build of instance d186a201-4ef8-40a6-9625-5e8ae99af4d1 was re-scheduled: Binding failed for port ac3d7b2e-1aed-46e9-9873-bf19232cff48, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 585.595157] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 585.595377] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquiring lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.595521] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Acquired lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.595682] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 586.014121] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.120565] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.182899] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.239255] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.392201] env[61648]: DEBUG nova.compute.manager [req-314a4cef-8146-40b1-9d3d-6b4403659a1a req-4c1cc29c-02f3-446f-8f4a-9d8d5b22cff0 service nova] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Received event network-vif-deleted-c43cbf92-2c48-41ba-822c-c6a62a56fae1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 586.405016] env[61648]: INFO nova.scheduler.client.report [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Deleted allocations for instance b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b [ 586.689077] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Releasing lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.689077] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.689077] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.689077] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bf36479-aa2f-4ce1-b827-d2877ae64421 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.700131] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100009f0-5bad-432f-8efb-20389593f5db {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.728195] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 74c00b03-bd37-49f7-b0b9-88404302c071 could not be found. [ 586.728490] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.728728] env[61648]: INFO nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Took 0.04 seconds to destroy the instance on the hypervisor. [ 586.729015] env[61648]: DEBUG oslo.service.loopingcall [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.733854] env[61648]: DEBUG nova.compute.manager [-] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.733962] env[61648]: DEBUG nova.network.neutron [-] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.741446] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Releasing lock "refresh_cache-d186a201-4ef8-40a6-9625-5e8ae99af4d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.741612] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 586.741786] env[61648]: DEBUG nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.741954] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.790546] env[61648]: DEBUG nova.network.neutron [-] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.805759] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.914797] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0edc2f6d-58c4-4da6-8fef-b5ef30520ef0 tempest-ServersWithSpecificFlavorTestJSON-137159576 tempest-ServersWithSpecificFlavorTestJSON-137159576-project-member] Lock "b15dbb0e-4f3d-451f-ba1e-f560b2e3ea6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.934s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.091531] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad5e311-7805-4307-b72e-d52c2b84c415 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.099544] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bf0933-3107-4e82-8e01-5df6fc1fb512 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.130940] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81004f25-eeae-4e4c-883f-c695f5b0e7ed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.140015] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584ed07a-e8c0-42ee-929e-5e575f1d57b0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.157778] env[61648]: DEBUG nova.compute.provider_tree [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.293574] env[61648]: DEBUG nova.network.neutron [-] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.306630] env[61648]: DEBUG nova.network.neutron [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.419354] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 587.664626] env[61648]: DEBUG nova.scheduler.client.report [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.797754] env[61648]: INFO nova.compute.manager [-] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Took 1.06 seconds to deallocate network for instance. [ 587.798674] env[61648]: DEBUG nova.compute.claims [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.798837] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.809519] env[61648]: INFO nova.compute.manager [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] [instance: d186a201-4ef8-40a6-9625-5e8ae99af4d1] Took 1.07 seconds to deallocate network for instance. [ 587.954014] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.171018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.171018] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 588.172367] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.692s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.684863] env[61648]: DEBUG nova.compute.utils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.688045] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Not allocating networking since 'none' was specified. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 588.840998] env[61648]: INFO nova.scheduler.client.report [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Deleted allocations for instance d186a201-4ef8-40a6-9625-5e8ae99af4d1 [ 589.191060] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 589.258672] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2772e0-07d3-4f61-b21d-94aab13e7871 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.267360] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eb7567-d261-4b62-bbdc-641183ece4fb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.302511] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6775ba87-8806-4d44-b8f1-1fea269effc7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.310958] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdb5d7b-3bee-4fe1-8ebb-ab6c35cc1cb5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.325897] env[61648]: DEBUG nova.compute.provider_tree [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.354357] env[61648]: DEBUG oslo_concurrency.lockutils [None req-143537bb-4b12-4585-aa37-c0466e00f69f tempest-ListServerFiltersTestJSON-1129996873 tempest-ListServerFiltersTestJSON-1129996873-project-member] Lock 
"d186a201-4ef8-40a6-9625-5e8ae99af4d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.314s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.832658] env[61648]: DEBUG nova.scheduler.client.report [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.864845] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 590.207155] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 590.236259] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.236509] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.236665] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.236840] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b 
tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.236981] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.236981] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.237415] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.237497] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 590.237660] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.237817] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.238261] env[61648]: DEBUG nova.virt.hardware [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.239072] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b1ae1c-cb57-48b3-84fa-7bd46ed4c6e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.248115] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5507f3b8-9657-4174-88c2-58c0be2d0518 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.263249] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.269305] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Creating folder: Project (8821076c375841d9827ffdc985f81e68). Parent ref: group-v285225. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.269763] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9496328e-5f54-4b4a-bcb0-54939ab93803 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.284661] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Created folder: Project (8821076c375841d9827ffdc985f81e68) in parent group-v285225. [ 590.284989] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Creating folder: Instances. Parent ref: group-v285233. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.285272] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-269a782b-bbb1-4ab2-a45a-3af16749b65a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.297019] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Created folder: Instances in parent group-v285233. [ 590.297019] env[61648]: DEBUG oslo.service.loopingcall [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.297019] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.297019] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1a7aea1-325d-4295-8a64-ddec382f4703 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.315346] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.315346] env[61648]: value = "task-1336606" [ 590.315346] env[61648]: _type = "Task" [ 590.315346] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.323407] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336606, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.339587] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.166s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.339587] env[61648]: ERROR nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Traceback (most recent call last): [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.driver.spawn(context, instance, image_meta, [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.339587] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] vm_ref = self.build_virtual_machine(instance, [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] for vif in network_info: [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self._sync_wrapper(fn, *args, **kwargs) [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.wait() [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self[:] = self._gt.wait() [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self._exit_event.wait() [ 590.339951] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] result = hub.switch() [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return self.greenlet.switch() [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] result = function(*args, **kwargs) [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] return func(*args, **kwargs) [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise e [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] nwinfo = self.network_api.allocate_for_instance( [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.340291] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] created_port_ids = self._update_ports_for_instance( [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] with excutils.save_and_reraise_exception(): [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] self.force_reraise() [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise self.value [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] updated_port = self._update_port( [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] _ensure_no_port_binding_failure(port) [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.340634] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] raise exception.PortBindingFailed(port_id=port['id']) [ 590.340936] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] nova.exception.PortBindingFailed: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. [ 590.340936] env[61648]: ERROR nova.compute.manager [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] [ 590.340936] env[61648]: DEBUG nova.compute.utils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 590.342206] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.497s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.343757] env[61648]: INFO nova.compute.claims [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.347887] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Build of instance c2dadf90-2469-4df6-bcc4-dd65d8a748bc was re-scheduled: Binding failed for port b57c77eb-fa48-4bba-a182-702c61029b4e, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 590.347887] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 590.348060] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquiring lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.348234] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Acquired lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.348406] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.398130] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.826812] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336606, 'name': CreateVM_Task, 'duration_secs': 0.339652} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.826812] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 590.827289] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.827408] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.828024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 590.828024] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0ef2ab8-35e2-49a1-8625-58da4f44e0e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.832811] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 590.832811] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f96f02-34d4-f1a2-2b3e-2964ae818272" [ 590.832811] env[61648]: _type = "Task" [ 590.832811] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.841265] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f96f02-34d4-f1a2-2b3e-2964ae818272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.882511] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.049020] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.319891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquiring lock "81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.319891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.342051] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f96f02-34d4-f1a2-2b3e-2964ae818272, 'name': SearchDatastore_Task, 'duration_secs': 0.010103} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.342355] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.342577] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.342803] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.342939] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.343121] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.343381] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-262df720-7204-418a-9556-87ec48132779 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.352419] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.352581] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 591.354464] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-092b1fe8-a964-42da-bc2e-34d4fc9359d2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.366309] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 591.366309] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52955030-eb2d-d6c2-9e92-6a84df418a8a" [ 591.366309] env[61648]: _type = "Task" [ 591.366309] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.379794] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52955030-eb2d-d6c2-9e92-6a84df418a8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010035} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.380733] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07cc1cf7-1373-4cf5-ba42-d7e9048472d4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.386277] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 591.386277] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5251edad-0357-3213-c467-c1dbe23f531d" [ 591.386277] env[61648]: _type = "Task" [ 591.386277] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.395575] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5251edad-0357-3213-c467-c1dbe23f531d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.552373] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Releasing lock "refresh_cache-c2dadf90-2469-4df6-bcc4-dd65d8a748bc" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.553462] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 591.553462] env[61648]: DEBUG nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 591.553462] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 591.576374] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.903533] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5251edad-0357-3213-c467-c1dbe23f531d, 'name': SearchDatastore_Task, 'duration_secs': 0.009231} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.903836] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.904070] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 591.904736] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8dca222-77e9-4675-9149-059786a4034e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.909163] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8331a54c-d052-4eed-a6f2-83610c85169e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.921266] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 591.921266] env[61648]: value = "task-1336607" [ 591.921266] env[61648]: _type = "Task" [ 591.921266] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.940712] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097ba25b-a1da-49f2-b1a3-4cde60fdfb34 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.944481] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.977330] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0724f894-c0d9-49ec-a630-31c9e11ca99c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.986723] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef3e940-6f8e-48d4-81c8-c6d011a3550a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.005101] env[61648]: DEBUG nova.compute.provider_tree [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.078462] env[61648]: DEBUG nova.network.neutron [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.741921] env[61648]: DEBUG nova.scheduler.client.report [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.745207] env[61648]: INFO nova.compute.manager [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] [instance: c2dadf90-2469-4df6-bcc4-dd65d8a748bc] Took 1.19 seconds to deallocate network for instance. [ 592.756279] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519671} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.758367] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 592.758367] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 592.758367] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8636e52b-4515-4fc8-a826-c9cd9ffece90 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.763816] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 592.763816] env[61648]: value = "task-1336608" [ 592.763816] env[61648]: _type = "Task" [ 592.763816] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.772114] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336608, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.249346] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.907s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.249346] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 593.251526] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.070s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.272911] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062487} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.273196] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 593.274036] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1aeda06-24b7-483b-b346-ce8ad7443cad {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.293552] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 593.295426] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48f6dacb-cb51-4213-b2bc-8504984917a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.314391] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 593.314391] env[61648]: value = "task-1336609" [ 593.314391] env[61648]: _type = "Task" [ 593.314391] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.322042] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336609, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.759230] env[61648]: DEBUG nova.compute.utils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.775402] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 593.775402] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.805727] env[61648]: INFO nova.scheduler.client.report [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Deleted allocations for instance c2dadf90-2469-4df6-bcc4-dd65d8a748bc [ 593.826334] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336609, 'name': ReconfigVM_Task, 'duration_secs': 0.312497} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.830556] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Reconfigured VM instance instance-00000015 to attach disk [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 593.831725] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20a80f5a-8f90-46a3-8cd3-8b6c5d8b4ce4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.835185] env[61648]: DEBUG nova.policy [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07695904a1d6484e9890f7a83d0252fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abf6ecd1a0b94fa1b2e085bb6fdef2c2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.839201] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the 
task: (returnval){ [ 593.839201] env[61648]: value = "task-1336610" [ 593.839201] env[61648]: _type = "Task" [ 593.839201] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.850355] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336610, 'name': Rename_Task} progress is 6%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.276696] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 594.294255] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5488a0a-4c9d-487e-afcf-31475a083215 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.303790] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dcef28-e047-44e9-a96a-dc6ed0a9dd8d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.337265] env[61648]: DEBUG oslo_concurrency.lockutils [None req-32d83bf5-fb5a-444d-ac4c-38c1af9ef0a9 tempest-VolumesAssistedSnapshotsTest-616813885 tempest-VolumesAssistedSnapshotsTest-616813885-project-member] Lock "c2dadf90-2469-4df6-bcc4-dd65d8a748bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.744s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.339421] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e051e8c5-d397-4164-a68b-32ed994e5936 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.354072] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d9dcba-9a94-4da8-878d-731dd708b412 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.359241] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336610, 'name': Rename_Task, 'duration_secs': 0.134403} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.360189] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 594.361758] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ea4c155-a184-46da-9578-ecf79d5105c5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.376067] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.383428] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 594.383428] env[61648]: value = "task-1336611" [ 594.383428] env[61648]: _type = "Task" [ 594.383428] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.395372] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.435603] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Successfully created port: c6388b06-4edc-443f-83b4-6856374e284f {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.843227] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 594.895023] env[61648]: DEBUG oslo_vmware.api [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336611, 'name': PowerOnVM_Task, 'duration_secs': 0.419718} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.895240] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 594.895452] env[61648]: INFO nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Took 4.69 seconds to spawn the instance on the hypervisor. [ 594.895592] env[61648]: DEBUG nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 594.896403] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060fb004-dc0a-4459-810c-8ff29cb3e9d3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.905345] env[61648]: ERROR nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [req-f2705333-d56e-4ffc-8d60-053d00af58f8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f2705333-d56e-4ffc-8d60-053d00af58f8"}]}: nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. 
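The 409 above is placement's optimistic-concurrency check: each resource provider carries a generation number, every successful inventory write bumps it, and a writer whose cached generation is stale gets back "placement.concurrent_update" and has to refresh before retrying, which is exactly what the "Refreshing inventories ..." entries that follow are doing. Below is a minimal standalone sketch of that refresh-and-retry pattern; the PlacementStub class and its put_inventory/get_generation methods are invented for illustration and are not the real placement API or Nova's report client.

    # Hypothetical sketch of generation-based optimistic concurrency, as seen
    # in the 409 "placement.concurrent_update" error above.  PlacementStub and
    # its methods are invented stand-ins, not the real placement API.

    class Conflict(Exception):
        """Stands in for the HTTP 409 a stale provider generation produces."""


    class PlacementStub:
        def __init__(self):
            self.generation = 7      # server-side resource provider generation
            self.inventory = {}

        def put_inventory(self, inventory, generation):
            # The write is only accepted if the caller's generation matches.
            if generation != self.generation:
                raise Conflict("resource provider generation conflict")
            self.inventory = inventory
            self.generation += 1     # every successful write bumps the generation
            return self.generation

        def get_generation(self):
            return self.generation


    def set_inventory_with_retry(placement, inventory, cached_generation,
                                 max_attempts=3):
        """Try to write inventory; on a conflict, refresh the generation and retry."""
        for _ in range(max_attempts):
            try:
                return placement.put_inventory(inventory, cached_generation)
            except Conflict:
                # Another writer bumped the generation first (e.g. a concurrent
                # instance claim); re-read it and try again, analogous to the
                # "Refreshing inventories ..." entries in the log.
                cached_generation = placement.get_generation()
        raise Conflict("gave up after %d attempts" % max_attempts)


    if __name__ == "__main__":
        placement = PlacementStub()
        stale_generation = 6         # deliberately stale cached generation
        new_gen = set_inventory_with_retry(
            placement,
            {"VCPU": {"total": 48, "allocation_ratio": 4.0}},
            stale_generation)
        print("inventory accepted, provider generation is now", new_gen)

Run as-is, the first attempt fails with the stale generation 6, the refresh picks up 7, and the second attempt succeeds, mirroring the error-then-refresh sequence in the surrounding entries.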
[ 594.924658] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 594.950143] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 594.950381] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.967031] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 594.997118] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 595.287714] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 595.324741] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.325013] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.325176] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.325433] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.325590] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.325812] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.325972] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.326147] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.326311] env[61648]: DEBUG 
nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.326467] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.326631] env[61648]: DEBUG nova.virt.hardware [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.327678] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995632c3-69e4-4780-a567-9ada0e69a91b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.335195] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abdd547-5031-47d3-baaf-48d9ccfde2eb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.369063] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.416269] env[61648]: DEBUG nova.compute.manager [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Received event network-changed-c6388b06-4edc-443f-83b4-6856374e284f {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.416472] env[61648]: DEBUG nova.compute.manager [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Refreshing instance network info cache due to event network-changed-c6388b06-4edc-443f-83b4-6856374e284f. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 595.416675] env[61648]: DEBUG oslo_concurrency.lockutils [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] Acquiring lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.416817] env[61648]: DEBUG oslo_concurrency.lockutils [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] Acquired lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.416970] env[61648]: DEBUG nova.network.neutron [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Refreshing network info cache for port c6388b06-4edc-443f-83b4-6856374e284f {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.424907] env[61648]: INFO nova.compute.manager [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Took 26.11 seconds to build instance. [ 595.475143] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aac754-ede3-40f9-85ae-8c891d708f1c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.483960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5c8161-02fb-4e05-8f82-92ad62a69be6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.525267] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f79893a-84b0-4f80-9089-19a542bd71a4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.538058] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cbf5c1-6310-4813-bbe0-3d1d4da32671 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.548451] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 595.645977] env[61648]: ERROR nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check 
neutron logs for more information. [ 595.645977] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.645977] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.645977] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.645977] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.645977] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.645977] env[61648]: ERROR nova.compute.manager raise self.value [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.645977] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 595.645977] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.645977] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 595.646401] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.646401] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 595.646401] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. 
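
The traceback above passes through oslo.utils' save_and_reraise_exception context manager (the excutils.py:227/200 frames): the original PortBindingFailed is recorded, any cleanup inside the with-block runs, and the exception is then re-raised unchanged via force_reraise. A minimal sketch of that pattern using the real oslo_utils API; the update_port and cleanup_partial_allocation callables are hypothetical stand-ins for the Neutron calls seen in the frames:

    from oslo_utils import excutils


    def update_port_or_cleanup(update_port, cleanup_partial_allocation, port_id):
        """Illustrative pattern only; both callables are hypothetical stand-ins."""
        try:
            return update_port(port_id)
        except Exception:
            # save_and_reraise_exception() records the in-flight exception and
            # re-raises it when the with-block exits, so the cleanup below runs
            # without swallowing the original error.
            with excutils.save_and_reraise_exception():
                cleanup_partial_allocation(port_id)

If the context manager is bound with "as ctxt", setting ctxt.reraise = False inside the block suppresses the re-raise for errors the cleanup decides are recoverable.
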
[ 595.646401] env[61648]: ERROR nova.compute.manager [ 595.646401] env[61648]: Traceback (most recent call last): [ 595.646401] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 595.646401] env[61648]: listener.cb(fileno) [ 595.646401] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.646401] env[61648]: result = function(*args, **kwargs) [ 595.646401] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 595.646401] env[61648]: return func(*args, **kwargs) [ 595.646401] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.646401] env[61648]: raise e [ 595.646401] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.646401] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 595.646401] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.646401] env[61648]: created_port_ids = self._update_ports_for_instance( [ 595.646401] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.646401] env[61648]: with excutils.save_and_reraise_exception(): [ 595.646401] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.646401] env[61648]: self.force_reraise() [ 595.646401] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.646401] env[61648]: raise self.value [ 595.646401] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.646401] env[61648]: updated_port = self._update_port( [ 595.646401] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.646401] env[61648]: _ensure_no_port_binding_failure(port) [ 595.646401] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.646401] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 595.647059] env[61648]: nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. [ 595.647059] env[61648]: Removing descriptor: 19 [ 595.647059] env[61648]: ERROR nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. 
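
All of these failures funnel through the same guard at nova/network/neutron.py:294, which inspects the port returned by Neutron and raises PortBindingFailed when the binding did not succeed. A self-contained sketch of that kind of check, assuming a plain dict port and a local stand-in for Nova's exception class ('binding_failed' is the binding:vif_type value Neutron reports for a failed binding):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustration only)."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding by setting binding:vif_type to
        # 'binding_failed'; the guard converts that into a typed exception.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])


    try:
        ensure_no_port_binding_failure(
            {"id": "c6388b06-4edc-443f-83b4-6856374e284f",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)
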
[ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Traceback (most recent call last): [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] yield resources [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.driver.spawn(context, instance, image_meta, [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.647059] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] vm_ref = self.build_virtual_machine(instance, [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] for vif in network_info: [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self._sync_wrapper(fn, *args, **kwargs) [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.wait() [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self[:] = self._gt.wait() [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self._exit_event.wait() [ 595.647375] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.647680] env[61648]: ERROR 
nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] result = hub.switch() [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self.greenlet.switch() [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] result = function(*args, **kwargs) [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return func(*args, **kwargs) [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise e [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] nwinfo = self.network_api.allocate_for_instance( [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.647680] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] created_port_ids = self._update_ports_for_instance( [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] with excutils.save_and_reraise_exception(): [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.force_reraise() [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise self.value [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] updated_port = self._update_port( [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.648046] 
env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] _ensure_no_port_binding_failure(port) [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.648046] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise exception.PortBindingFailed(port_id=port['id']) [ 595.648328] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. [ 595.648328] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] [ 595.648328] env[61648]: INFO nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Terminating instance [ 595.649298] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.926987] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c57b3dd4-fded-41ad-b56d-36009207230b tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "e918b827-ea37-4589-8999-e363aba4492d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.980s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.951746] env[61648]: DEBUG nova.network.neutron [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.085051] env[61648]: DEBUG nova.network.neutron [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.100533] env[61648]: DEBUG nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 596.100636] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 55 to 56 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 596.100985] env[61648]: DEBUG nova.compute.provider_tree [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 596.364709] env[61648]: INFO nova.compute.manager [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Rebuilding instance [ 596.427088] env[61648]: DEBUG nova.compute.manager [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 596.428339] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f797ee1c-c1e5-41cd-a214-753f1790c0cd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.431379] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 596.589942] env[61648]: DEBUG oslo_concurrency.lockutils [req-551b6ff6-a8c7-4959-bc38-bf9fda56dd67 req-683f81ce-13da-418b-9738-6d015677b280 service nova] Releasing lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.594697] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquired lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.595010] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.606193] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.355s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.607383] env[61648]: ERROR nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. 
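
The Acquiring/Acquired/Releasing lock entries around "compute_resources" and "refresh_cache-<uuid>" come from oslo.concurrency's lockutils, which also logs how long each lock was waited on and held. A minimal sketch of both forms seen in these entries, using the real oslo_concurrency.lockutils API; the function names and return values are made up for illustration:

    from oslo_concurrency import lockutils


    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        # Serialized across callers in this process; lockutils emits the
        # "acquired by" / "released by" timing messages seen in the log.
        return "claimed %s" % instance_uuid


    def refresh_network_cache(instance_uuid):
        # Context-manager form, matching the refresh_cache-<uuid> lock names.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return "refreshed %s" % instance_uuid
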
[ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Traceback (most recent call last): [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.driver.spawn(context, instance, image_meta, [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] vm_ref = self.build_virtual_machine(instance, [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.607383] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] for vif in network_info: [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self._sync_wrapper(fn, *args, **kwargs) [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.wait() [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self[:] = self._gt.wait() [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self._exit_event.wait() [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] result = hub.switch() [ 596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
596.607804] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return self.greenlet.switch() [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] result = function(*args, **kwargs) [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] return func(*args, **kwargs) [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise e [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] nwinfo = self.network_api.allocate_for_instance( [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] created_port_ids = self._update_ports_for_instance( [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] with excutils.save_and_reraise_exception(): [ 596.608211] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] self.force_reraise() [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise self.value [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] updated_port = self._update_port( [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] _ensure_no_port_binding_failure(port) [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] raise exception.PortBindingFailed(port_id=port['id']) [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] nova.exception.PortBindingFailed: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. [ 596.608603] env[61648]: ERROR nova.compute.manager [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] [ 596.609041] env[61648]: DEBUG nova.compute.utils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 596.609982] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.150s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.618961] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Build of instance 66562ea6-5d39-4b98-a9e2-0512295ab94f was re-scheduled: Binding failed for port 00364354-7ff7-48d7-ba06-aaa2777ab71c, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 596.619753] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 596.620122] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.620371] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.620627] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.947266] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 596.947877] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87ff46ae-5edf-457c-98d5-85898a64c0db {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.955400] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 596.955400] env[61648]: value = "task-1336612" [ 596.955400] env[61648]: _type = "Task" [ 596.955400] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.969211] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.969388] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336612, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.117654] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.177035] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.279587] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.317643] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.440457] env[61648]: DEBUG nova.compute.manager [req-e94bd3a6-cac5-4ae4-8b57-d4f663754e6e req-2e0bc3f0-b556-44e3-a27f-ad6d19c2b080 service nova] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Received event network-vif-deleted-c6388b06-4edc-443f-83b4-6856374e284f {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.469253] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336612, 'name': PowerOffVM_Task, 'duration_secs': 0.128862} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.469253] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 597.469253] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 597.469913] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff69767-4920-4d11-8e5b-67471f4ac26f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.480849] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 597.481854] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1ecaaab-4971-49d0-b901-68a6d2a7187e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.513185] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 597.513185] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 597.513357] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Deleting the datastore file [datastore2] e918b827-ea37-4589-8999-e363aba4492d {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 597.513619] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94f2eede-28bc-46f7-b5d2-859cb5c13440 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.522498] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 597.522498] env[61648]: value = "task-1336614" [ 597.522498] env[61648]: _type = "Task" [ 597.522498] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.530368] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.571492] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0532265-609d-41de-b838-44c7c3a1a27b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.579013] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119fbb41-b8b6-4ef3-af05-e32a2502ebfb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.609088] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9c3207-c717-4634-b462-a780a7a26ef7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.617105] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8fc332-96f4-4bfe-9a80-d421604dab81 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.630810] env[61648]: DEBUG nova.compute.provider_tree [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.782708] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Releasing lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.783170] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 597.783359] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 597.783652] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb485a68-c7a3-4293-a0a0-0f3bae75fad2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.794413] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272de529-dda4-4c98-a986-db44ec980b9e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.816399] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16e92598-3eba-42c1-b9ec-3b2b91231267 could not be found. [ 597.816399] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 597.816593] env[61648]: INFO nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Took 0.03 seconds to destroy the instance on the hypervisor. [ 597.816716] env[61648]: DEBUG oslo.service.loopingcall [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.816935] env[61648]: DEBUG nova.compute.manager [-] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.817037] env[61648]: DEBUG nova.network.neutron [-] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.820217] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-66562ea6-5d39-4b98-a9e2-0512295ab94f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.820390] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 597.820553] env[61648]: DEBUG nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.820704] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.842712] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.844743] env[61648]: DEBUG nova.network.neutron [-] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.033438] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118628} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.034164] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 598.034164] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 598.034164] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.138694] env[61648]: DEBUG nova.scheduler.client.report [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
598.346693] env[61648]: DEBUG nova.network.neutron [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.347963] env[61648]: DEBUG nova.network.neutron [-] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.644901] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.035s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.645576] env[61648]: ERROR nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Traceback (most recent call last): [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.driver.spawn(context, instance, image_meta, [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] vm_ref = self.build_virtual_machine(instance, [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.645576] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] for vif in network_info: [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return 
self._sync_wrapper(fn, *args, **kwargs) [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.wait() [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self[:] = self._gt.wait() [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return self._exit_event.wait() [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] result = hub.switch() [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 598.645863] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return self.greenlet.switch() [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] result = function(*args, **kwargs) [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] return func(*args, **kwargs) [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise e [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] nwinfo = self.network_api.allocate_for_instance( [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] created_port_ids = self._update_ports_for_instance( [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] with 
excutils.save_and_reraise_exception(): [ 598.646176] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] self.force_reraise() [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise self.value [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] updated_port = self._update_port( [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] _ensure_no_port_binding_failure(port) [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] raise exception.PortBindingFailed(port_id=port['id']) [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] nova.exception.PortBindingFailed: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. [ 598.646471] env[61648]: ERROR nova.compute.manager [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] [ 598.646724] env[61648]: DEBUG nova.compute.utils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 598.647872] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.095s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.649900] env[61648]: INFO nova.compute.claims [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.653867] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Build of instance e51d3f4e-41d5-4190-a8e9-21c743aa3b5e was re-scheduled: Binding failed for port 473639d2-609a-43b0-9eea-0296702d5358, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 598.654362] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 598.654589] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.654767] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.654924] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.854348] env[61648]: INFO nova.compute.manager [-] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Took 1.04 seconds to deallocate network for instance. [ 598.854880] env[61648]: INFO nova.compute.manager [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 66562ea6-5d39-4b98-a9e2-0512295ab94f] Took 1.03 seconds to deallocate network for instance. 
[ 598.859381] env[61648]: DEBUG nova.compute.claims [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 598.859553] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.090352] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 599.090601] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 599.090756] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.090936] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 599.091370] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.091370] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 599.091452] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 
tempest-ServerShowV257Test-1897748483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 599.091752] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 599.091943] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 599.092430] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 599.092721] env[61648]: DEBUG nova.virt.hardware [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 599.093650] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ce675d-cdfe-4adb-9f6f-4e10fffc6b16 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.103363] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208627f7-205b-406f-8f0d-11e4b49f698e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.120413] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.126784] env[61648]: DEBUG oslo.service.loopingcall [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.127075] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.127470] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b49954e9-9b47-4d48-bf2e-b88be62a5b64 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.145028] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.145028] env[61648]: value = "task-1336615" [ 599.145028] env[61648]: _type = "Task" [ 599.145028] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.158299] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336615, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.175464] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.280250] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.657591] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336615, 'name': CreateVM_Task, 'duration_secs': 0.258234} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.658155] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 599.658706] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.659048] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.659486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.662912] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f5857d9-5dbb-462f-9264-3a921168a887 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.670021] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 599.670021] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52216fa3-a6de-31c9-4d40-e1a665ee8495" [ 599.670021] env[61648]: _type = "Task" [ 599.670021] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.679546] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52216fa3-a6de-31c9-4d40-e1a665ee8495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.787219] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.787586] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 599.787861] env[61648]: DEBUG nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.788157] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.823379] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.893051] env[61648]: INFO nova.scheduler.client.report [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Deleted allocations for instance 66562ea6-5d39-4b98-a9e2-0512295ab94f [ 600.167168] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81566ea0-77a7-4b03-8d78-2c2c40234c2f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.183825] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52216fa3-a6de-31c9-4d40-e1a665ee8495, 'name': SearchDatastore_Task, 'duration_secs': 0.008285} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.184825] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1113a2e-c62d-41d4-a683-2a3c9feed00e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.188138] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.188378] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.188611] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.188760] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.188938] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.189211] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1864cb0-cab6-41f9-a096-f0153bc0810d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.229200] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c11fca-39af-4094-92d8-60b1040d0ae1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.232177] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.232177] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.233022] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2cdd8d4-b124-474e-8da0-ff0b1f70bc62 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.241523] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80efb5da-2148-4f1e-94a1-235b937a2473 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.246738] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 600.246738] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52353db8-d274-6a6f-4ad9-3c472e946aeb" [ 600.246738] env[61648]: _type = "Task" [ 600.246738] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.260332] env[61648]: DEBUG nova.compute.provider_tree [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.264862] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52353db8-d274-6a6f-4ad9-3c472e946aeb, 'name': SearchDatastore_Task, 'duration_secs': 0.011296} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.265681] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-938d3570-87d3-41e2-b266-b986c2dafd1f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.273021] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 600.273021] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]522a5e57-e1df-75f3-58eb-ee1c877f163c" [ 600.273021] env[61648]: _type = "Task" [ 600.273021] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.279842] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]522a5e57-e1df-75f3-58eb-ee1c877f163c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.326298] env[61648]: DEBUG nova.network.neutron [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.406107] env[61648]: DEBUG oslo_concurrency.lockutils [None req-21ed985c-980b-4ccc-9751-756bf19ce6c3 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "66562ea6-5d39-4b98-a9e2-0512295ab94f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.308s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.764390] env[61648]: DEBUG nova.scheduler.client.report [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.786511] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]522a5e57-e1df-75f3-58eb-ee1c877f163c, 'name': SearchDatastore_Task, 'duration_secs': 0.008236} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.786511] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.786511] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 600.786511] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-715b4f42-3eed-4be7-b1bf-020bbca0e460 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.797111] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 600.797111] env[61648]: value = "task-1336616" [ 600.797111] env[61648]: _type = "Task" [ 600.797111] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.806115] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336616, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.829178] env[61648]: INFO nova.compute.manager [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: e51d3f4e-41d5-4190-a8e9-21c743aa3b5e] Took 1.04 seconds to deallocate network for instance. [ 600.913061] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 601.275822] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.275822] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 601.278527] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.645s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.281021] env[61648]: INFO nova.compute.claims [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.311017] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336616, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.438529] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.653845] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "959931f5-eebc-4544-af88-ea231301b4a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.653845] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "959931f5-eebc-4544-af88-ea231301b4a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.789096] env[61648]: DEBUG nova.compute.utils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.793020] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.793237] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.810356] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336616, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526185} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.812097] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 601.812307] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 601.814718] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07502028-d6f5-417f-9ec4-981b2b7bc7e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.819516] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 601.819516] env[61648]: value = "task-1336617" [ 601.819516] env[61648]: _type = "Task" [ 601.819516] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.827329] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336617, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.867376] env[61648]: DEBUG nova.policy [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8210a3ea4f35489bbea80e490f7a00c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91de5af21aba4db98efb62b0675a9c92', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.871969] env[61648]: INFO nova.scheduler.client.report [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance e51d3f4e-41d5-4190-a8e9-21c743aa3b5e [ 602.296924] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 602.316427] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Successfully created port: 960e68d2-2f43-4240-b0d5-ff4a2af28a6c {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.335485] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0827} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.335485] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.336341] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633bde49-a5fa-415b-bd7e-ad82cc63ef77 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.366081] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.369956] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d3844a7-d8f0-4c6a-8696-12bc54868350 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.386069] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bde1d6f4-7974-4df8-b138-7196e7a557d5 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "e51d3f4e-41d5-4190-a8e9-21c743aa3b5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.872s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.393253] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 602.393253] env[61648]: value = "task-1336618" [ 602.393253] env[61648]: _type = "Task" [ 602.393253] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.409139] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336618, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.772212] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2182abbf-ea6b-4ae0-b49d-e549ef530c77 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.779832] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d50ba8c-e8ac-4346-a3d0-7d5654355083 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.816333] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5532c2d9-6c90-412c-92a1-09c512db93cb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.825173] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c112103f-7202-4047-b7f8-52f1824c120e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.837253] env[61648]: DEBUG nova.compute.provider_tree [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.888398] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 602.908269] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336618, 'name': ReconfigVM_Task, 'duration_secs': 0.334113} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.908269] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Reconfigured VM instance instance-00000015 to attach disk [datastore2] e918b827-ea37-4589-8999-e363aba4492d/e918b827-ea37-4589-8999-e363aba4492d.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.908269] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dff45bc-c301-40a4-8baf-37c3ce521900 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.913405] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 602.913405] env[61648]: value = "task-1336619" [ 602.913405] env[61648]: _type = "Task" [ 602.913405] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.921115] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336619, 'name': Rename_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.319706] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 603.340832] env[61648]: DEBUG nova.scheduler.client.report [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 603.353727] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.353966] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.354131] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.354310] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Flavor pref 0:0:0 {{(pid=61648) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.354450] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.354591] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.354827] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.355012] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.355164] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.355322] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.355487] env[61648]: DEBUG nova.virt.hardware [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.356367] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37411e4-1322-4644-b555-5512895651ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.365396] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7701606-e692-43d1-baf9-13eecf61d893 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.425345] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336619, 'name': Rename_Task, 'duration_secs': 0.143654} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.425625] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.426118] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1f35e44-a448-460d-b264-9af42789d2ae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.428445] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.433345] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 603.433345] env[61648]: value = "task-1336620" [ 603.433345] env[61648]: _type = "Task" [ 603.433345] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.441406] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.583116] env[61648]: DEBUG nova.compute.manager [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Received event network-changed-960e68d2-2f43-4240-b0d5-ff4a2af28a6c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.583418] env[61648]: DEBUG nova.compute.manager [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Refreshing instance network info cache due to event network-changed-960e68d2-2f43-4240-b0d5-ff4a2af28a6c. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 603.585752] env[61648]: DEBUG oslo_concurrency.lockutils [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] Acquiring lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.585752] env[61648]: DEBUG oslo_concurrency.lockutils [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] Acquired lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.585752] env[61648]: DEBUG nova.network.neutron [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Refreshing network info cache for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.675273] env[61648]: ERROR nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. [ 603.675273] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.675273] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.675273] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.675273] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.675273] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.675273] env[61648]: ERROR nova.compute.manager raise self.value [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.675273] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 603.675273] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.675273] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 603.675893] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.675893] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 603.675893] env[61648]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. [ 603.675893] env[61648]: ERROR nova.compute.manager [ 603.675893] env[61648]: Traceback (most recent call last): [ 603.675893] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 603.675893] env[61648]: listener.cb(fileno) [ 603.675893] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.675893] env[61648]: result = function(*args, **kwargs) [ 603.675893] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.675893] env[61648]: return func(*args, **kwargs) [ 603.675893] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.675893] env[61648]: raise e [ 603.675893] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.675893] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 603.675893] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.675893] env[61648]: created_port_ids = self._update_ports_for_instance( [ 603.675893] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.675893] env[61648]: with excutils.save_and_reraise_exception(): [ 603.675893] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.675893] env[61648]: self.force_reraise() [ 603.675893] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.675893] env[61648]: raise self.value [ 603.675893] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.675893] env[61648]: updated_port = self._update_port( [ 603.675893] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.675893] env[61648]: _ensure_no_port_binding_failure(port) [ 603.675893] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.675893] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 603.676899] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. [ 603.676899] env[61648]: Removing descriptor: 14 [ 603.676899] env[61648]: ERROR nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. 
[ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Traceback (most recent call last): [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] yield resources [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.driver.spawn(context, instance, image_meta, [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.676899] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] vm_ref = self.build_virtual_machine(instance, [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] for vif in network_info: [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self._sync_wrapper(fn, *args, **kwargs) [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.wait() [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self[:] = self._gt.wait() [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self._exit_event.wait() [ 603.677349] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.677897] env[61648]: ERROR 
nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] result = hub.switch() [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self.greenlet.switch() [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] result = function(*args, **kwargs) [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return func(*args, **kwargs) [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise e [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] nwinfo = self.network_api.allocate_for_instance( [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.677897] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] created_port_ids = self._update_ports_for_instance( [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] with excutils.save_and_reraise_exception(): [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.force_reraise() [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise self.value [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] updated_port = self._update_port( [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.678466] 
env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] _ensure_no_port_binding_failure(port) [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.678466] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise exception.PortBindingFailed(port_id=port['id']) [ 603.678873] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. [ 603.678873] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] [ 603.678873] env[61648]: INFO nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Terminating instance [ 603.679247] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.846586] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.847244] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 603.850629] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.052s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.944856] env[61648]: DEBUG oslo_vmware.api [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336620, 'name': PowerOnVM_Task, 'duration_secs': 0.45135} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.945060] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.945280] env[61648]: DEBUG nova.compute.manager [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 603.946205] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad121852-4691-4b2c-aef7-125c21342798 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.111075] env[61648]: DEBUG nova.network.neutron [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.210329] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "831da774-5e37-4d49-a1fd-3eb421c7fcb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.210714] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "831da774-5e37-4d49-a1fd-3eb421c7fcb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.231302] env[61648]: DEBUG nova.network.neutron [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.362438] env[61648]: DEBUG nova.compute.utils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 604.364560] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 604.364560] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 604.454902] env[61648]: DEBUG nova.policy [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e0f547b01e94904800d532194a53be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f61b9de98314d118496a0b69e59a6e9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.465969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.733953] env[61648]: DEBUG oslo_concurrency.lockutils [req-2893e2ab-a2cf-4048-b6f3-1ab7f2a818f7 req-6ce51560-ec61-4fba-a6d9-aee828b409ec service nova] Releasing lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.734446] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquired lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.735989] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.794582] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 604.794899] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 604.841484] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Successfully created port: 
d69f405b-b65c-42c6-b5ce-eafc37eb1e8b {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.867934] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.872178] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aaf6ce-72c6-4ee4-aae6-c205125fa8c1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.881832] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3a5569-af4a-4008-850a-7a8376cf8022 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.919818] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50dceec-28d8-4588-bd10-b615d4725855 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.928451] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df78cfd9-f10a-497f-a5c7-bc3f6980e425 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.944109] env[61648]: DEBUG nova.compute.provider_tree [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.220146] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "e918b827-ea37-4589-8999-e363aba4492d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.220469] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "e918b827-ea37-4589-8999-e363aba4492d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.220718] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "e918b827-ea37-4589-8999-e363aba4492d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.220975] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock 
"e918b827-ea37-4589-8999-e363aba4492d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.221194] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "e918b827-ea37-4589-8999-e363aba4492d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.223760] env[61648]: INFO nova.compute.manager [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Terminating instance [ 605.225924] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "refresh_cache-e918b827-ea37-4589-8999-e363aba4492d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.226158] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquired lock "refresh_cache-e918b827-ea37-4589-8999-e363aba4492d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.226357] env[61648]: DEBUG nova.network.neutron [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.261891] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.300920] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.301100] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 605.301299] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 605.334875] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.380185] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Successfully created port: be3dd51e-d36f-4491-a76a-10e82f636444 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.447554] env[61648]: DEBUG nova.scheduler.client.report [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.734299] env[61648]: DEBUG nova.compute.manager [req-af1a6633-2a97-4f18-a552-2f7564edf602 req-25f181fe-608c-4ff8-9e04-e634dcedcdb8 service nova] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Received event network-vif-deleted-960e68d2-2f43-4240-b0d5-ff4a2af28a6c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.745302] env[61648]: DEBUG nova.network.neutron [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.793660] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Successfully created port: 2a6dfb64-0337-4602-96e3-91d746b3d87e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.806011] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e918b827-ea37-4589-8999-e363aba4492d] Skipping network cache update for instance because it is being deleted. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 605.806177] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 605.806306] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 605.806430] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 605.806552] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 605.806740] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.806893] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.807044] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.807191] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.807329] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.807470] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.807596] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 605.807738] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.838216] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Releasing lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.838715] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.838916] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.839680] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed134ea8-93d4-470d-93fe-e06dc902f73c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.842950] env[61648]: DEBUG nova.network.neutron [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.851582] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724cfdf5-7118-470f-b629-8eee64090911 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.881321] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25168ea9-24fb-4f63-b508-b5c3a47a77e4 could not be found. [ 605.882052] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.882052] env[61648]: INFO nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 605.882052] env[61648]: DEBUG oslo.service.loopingcall [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.882222] env[61648]: DEBUG nova.compute.manager [-] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.882317] env[61648]: DEBUG nova.network.neutron [-] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.886727] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.910811] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.911068] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.911227] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.911402] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.911545] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.911687] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.912174] 
env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.912428] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.912672] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.912893] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.913136] env[61648]: DEBUG nova.virt.hardware [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.914308] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891ca79e-8d59-4698-8e7e-80048c7abd13 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.923288] env[61648]: DEBUG nova.network.neutron [-] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.925622] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfa95d4-e6d2-4437-988b-843a82672d60 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.952953] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.102s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.953677] env[61648]: ERROR nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. 
[ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Traceback (most recent call last): [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.driver.spawn(context, instance, image_meta, [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] vm_ref = self.build_virtual_machine(instance, [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.953677] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] for vif in network_info: [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self._sync_wrapper(fn, *args, **kwargs) [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.wait() [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self[:] = self._gt.wait() [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self._exit_event.wait() [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] result = hub.switch() [ 605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
605.953987] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return self.greenlet.switch() [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] result = function(*args, **kwargs) [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] return func(*args, **kwargs) [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise e [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] nwinfo = self.network_api.allocate_for_instance( [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] created_port_ids = self._update_ports_for_instance( [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] with excutils.save_and_reraise_exception(): [ 605.954304] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] self.force_reraise() [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise self.value [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] updated_port = self._update_port( [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] _ensure_no_port_binding_failure(port) [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] raise exception.PortBindingFailed(port_id=port['id']) [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] nova.exception.PortBindingFailed: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. [ 605.954602] env[61648]: ERROR nova.compute.manager [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] [ 605.954997] env[61648]: DEBUG nova.compute.utils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 605.955886] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.002s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.958670] env[61648]: INFO nova.compute.claims [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.961510] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Build of instance 74c00b03-bd37-49f7-b0b9-88404302c071 was re-scheduled: Binding failed for port c43cbf92-2c48-41ba-822c-c6a62a56fae1, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 605.962577] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 605.962577] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquiring lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.962676] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Acquired lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.963148] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.316220] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.346071] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Releasing lock "refresh_cache-e918b827-ea37-4589-8999-e363aba4492d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.346499] env[61648]: DEBUG nova.compute.manager [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.346697] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.347576] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c838ae8-00a9-4fb7-8725-1f92aafa49f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.355492] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.355788] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2fa8c2b-03d3-45f3-9336-a1779b30a177 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.363256] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 606.363256] env[61648]: value = "task-1336621" [ 606.363256] env[61648]: _type = "Task" [ 606.363256] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.373050] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.431631] env[61648]: DEBUG nova.network.neutron [-] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.494566] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.619693] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.874928] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336621, 'name': PowerOffVM_Task, 'duration_secs': 0.133088} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.876025] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.876440] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.876807] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4ce47d8-2601-4459-bfc7-42faf6ad867d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.920022] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 606.920022] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 606.920022] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Deleting the datastore file [datastore2] e918b827-ea37-4589-8999-e363aba4492d {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.920022] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aea7f223-f63f-4ecd-9ba0-8686f25235b3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.929020] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for the task: (returnval){ [ 606.929020] env[61648]: value = "task-1336623" [ 606.929020] env[61648]: _type = "Task" [ 606.929020] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.935721] env[61648]: INFO nova.compute.manager [-] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Took 1.05 seconds to deallocate network for instance. [ 606.937218] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336623, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.940408] env[61648]: DEBUG nova.compute.claims [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 606.940758] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.122981] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Releasing lock "refresh_cache-74c00b03-bd37-49f7-b0b9-88404302c071" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.123262] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 607.123440] env[61648]: DEBUG nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 607.123629] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.146845] env[61648]: ERROR nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. 
[ 607.146845] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.146845] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.146845] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.146845] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.146845] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.146845] env[61648]: ERROR nova.compute.manager raise self.value [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.146845] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.146845] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.146845] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.147347] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.147347] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.147347] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. 
[ 607.147347] env[61648]: ERROR nova.compute.manager [ 607.147347] env[61648]: Traceback (most recent call last): [ 607.147347] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.147347] env[61648]: listener.cb(fileno) [ 607.147347] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.147347] env[61648]: result = function(*args, **kwargs) [ 607.147347] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.147347] env[61648]: return func(*args, **kwargs) [ 607.147347] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.147347] env[61648]: raise e [ 607.147347] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.147347] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 607.147347] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.147347] env[61648]: created_port_ids = self._update_ports_for_instance( [ 607.147347] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.147347] env[61648]: with excutils.save_and_reraise_exception(): [ 607.147347] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.147347] env[61648]: self.force_reraise() [ 607.147347] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.147347] env[61648]: raise self.value [ 607.147347] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.147347] env[61648]: updated_port = self._update_port( [ 607.147347] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.147347] env[61648]: _ensure_no_port_binding_failure(port) [ 607.147347] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.147347] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.148134] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. [ 607.148134] env[61648]: Removing descriptor: 14 [ 607.148134] env[61648]: ERROR nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. 
[ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Traceback (most recent call last): [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] yield resources [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.driver.spawn(context, instance, image_meta, [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.148134] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] vm_ref = self.build_virtual_machine(instance, [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] for vif in network_info: [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self._sync_wrapper(fn, *args, **kwargs) [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.wait() [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self[:] = self._gt.wait() [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self._exit_event.wait() [ 607.148504] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.148898] env[61648]: ERROR 
nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] result = hub.switch() [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self.greenlet.switch() [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] result = function(*args, **kwargs) [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return func(*args, **kwargs) [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise e [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] nwinfo = self.network_api.allocate_for_instance( [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.148898] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] created_port_ids = self._update_ports_for_instance( [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] with excutils.save_and_reraise_exception(): [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.force_reraise() [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise self.value [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] updated_port = self._update_port( [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.149267] 
env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] _ensure_no_port_binding_failure(port) [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.149267] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise exception.PortBindingFailed(port_id=port['id']) [ 607.149602] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. [ 607.149602] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] [ 607.149602] env[61648]: INFO nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Terminating instance [ 607.151850] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.153540] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.153818] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquired lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.153878] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.439267] env[61648]: DEBUG oslo_vmware.api [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Task: {'id': task-1336623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103629} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.439267] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.439267] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 607.439267] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.439267] env[61648]: INFO nova.compute.manager [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Took 1.09 seconds to destroy the instance on the hypervisor. [ 607.439540] env[61648]: DEBUG oslo.service.loopingcall [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.439540] env[61648]: DEBUG nova.compute.manager [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 607.439540] env[61648]: DEBUG nova.network.neutron [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.443125] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187daca3-1e68-4332-990f-51caee3ef165 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.450387] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efee1db-f61f-44be-a843-4fea538c7393 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.480644] env[61648]: DEBUG nova.network.neutron [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.483031] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac75beb2-8252-47f5-b478-1164416f39f9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.492187] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad61aac1-9504-4c43-8cf8-71ccc397b6df {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.505363] env[61648]: DEBUG nova.compute.provider_tree [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.659019] env[61648]: DEBUG nova.network.neutron [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.702962] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.764364] env[61648]: DEBUG nova.compute.manager [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Received event network-changed-d69f405b-b65c-42c6-b5ce-eafc37eb1e8b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.764364] env[61648]: DEBUG nova.compute.manager [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Refreshing instance network info cache due to event network-changed-d69f405b-b65c-42c6-b5ce-eafc37eb1e8b. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 607.764752] env[61648]: DEBUG oslo_concurrency.lockutils [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] Acquiring lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.812247] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.987174] env[61648]: DEBUG nova.network.neutron [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.012920] env[61648]: DEBUG nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.163875] env[61648]: INFO nova.compute.manager [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] [instance: 74c00b03-bd37-49f7-b0b9-88404302c071] Took 1.04 seconds to deallocate network for instance. [ 608.317174] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Releasing lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.318058] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 608.318058] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 608.318230] env[61648]: DEBUG oslo_concurrency.lockutils [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] Acquired lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.318306] env[61648]: DEBUG nova.network.neutron [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Refreshing network info cache for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.319473] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f40212b8-014d-491a-8e4d-279c36a5a371 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.332962] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765ea475-15dd-4c9d-bb8e-3f82720e4efa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.355155] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d23a9ab2-01ed-4d41-b89f-445ecc5f410f could not be found. [ 608.355974] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.356324] env[61648]: INFO nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.356635] env[61648]: DEBUG oslo.service.loopingcall [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.357320] env[61648]: DEBUG nova.compute.manager [-] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.357876] env[61648]: DEBUG nova.network.neutron [-] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.410913] env[61648]: DEBUG nova.network.neutron [-] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.492937] env[61648]: INFO nova.compute.manager [-] [instance: e918b827-ea37-4589-8999-e363aba4492d] Took 1.05 seconds to deallocate network for instance. [ 608.520855] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.521489] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 608.531021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.129s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.531021] env[61648]: INFO nova.compute.claims [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.888570] env[61648]: DEBUG nova.network.neutron [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.001783] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.033755] env[61648]: DEBUG nova.compute.utils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.041270] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 609.041270] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.132857] env[61648]: DEBUG nova.network.neutron [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.212060] env[61648]: INFO nova.scheduler.client.report [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Deleted allocations for instance 74c00b03-bd37-49f7-b0b9-88404302c071 [ 609.281447] env[61648]: DEBUG nova.policy [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1679b8e397bd4dffab66422c61d72078', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61cc7ac999324335bf3a0bb3d84f7f4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 609.543562] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 609.637094] env[61648]: DEBUG oslo_concurrency.lockutils [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] Releasing lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.637358] env[61648]: DEBUG nova.compute.manager [req-ecce6f9e-3e63-4047-8530-b1b4a290bdaa req-19d214dd-09f4-4edf-a3f0-6cfc85ba1587 service nova] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Received event network-vif-deleted-d69f405b-b65c-42c6-b5ce-eafc37eb1e8b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 609.667399] env[61648]: DEBUG nova.network.neutron [-] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.674463] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Successfully created port: 9aff1da9-b577-425f-993c-4076d53a7eb0 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.724281] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9ee0eeef-505e-4e00-ad17-8a7f03a6c54f tempest-ServersV294TestFqdnHostnames-1573356480 tempest-ServersV294TestFqdnHostnames-1573356480-project-member] Lock "74c00b03-bd37-49f7-b0b9-88404302c071" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.165s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.073218] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d78ed8-7d79-4bc0-8ed5-e887519005d5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.082546] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf46d206-6ed4-4375-bb15-5a2e30577a19 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.115821] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfdf634-4cc0-49e0-bf54-60295db4ef80 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.125251] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06edf45-e204-437c-9a0c-90c98dc9e3d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.141687] env[61648]: DEBUG nova.compute.provider_tree [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.174189] env[61648]: INFO nova.compute.manager [-] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Took 1.82 seconds to deallocate network for instance. 
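[editor's note] The failure that repeats throughout this section is nova.exception.PortBindingFailed, raised from _ensure_no_port_binding_failure at nova/network/neutron.py:294 in the tracebacks above once Neutron reports that it could not bind the port. The following is a minimal, self-contained sketch of that guard for illustration only, not the verbatim Nova source: the exception class, the helper name ensure_no_port_binding_failure, and the 'binding_failed' sentinel value are stand-ins chosen to mirror what the log shows.

# Sketch (assumptions labeled above): reproduce the binding:vif_type check that
# turns a Neutron binding failure into the PortBindingFailed errors logged here.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel reported by Neutron


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustration only)."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Mirrors the guard referenced at nova/network/neutron.py:294 in the
    # tracebacks above: a port whose binding failed aborts the network
    # allocation, which then unwinds through _update_ports_for_instance
    # and allocate_for_instance back into _allocate_network_async.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port Neutron failed to bind, as with d69f405b-... in this log.
try:
    ensure_no_port_binding_failure(
        {'id': 'd69f405b-b65c-42c6-b5ce-eafc37eb1e8b',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)  # same message format as the ERROR records above

After the exception propagates, the compute manager aborts the resource claim and deallocates the instance's networks, which is exactly the clean-up sequence the surrounding records show.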
[ 610.177816] env[61648]: DEBUG nova.compute.claims [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.178088] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.230090] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 610.554571] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 610.597090] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 610.597259] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 610.597928] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.597928] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 610.597928] env[61648]: DEBUG nova.virt.hardware [None 
req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.597928] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 610.598131] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 610.598616] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 610.598810] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 610.599024] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 610.599198] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.600138] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5894588-1de5-4c30-8aa4-b74ef50560a9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.610544] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6a712d-956d-4063-b4f7-0a87b0c176ea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.645338] env[61648]: DEBUG nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.760636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.858916] env[61648]: DEBUG nova.compute.manager [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Received event network-changed-9aff1da9-b577-425f-993c-4076d53a7eb0 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.859170] env[61648]: DEBUG nova.compute.manager [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Refreshing instance network info cache due to event network-changed-9aff1da9-b577-425f-993c-4076d53a7eb0. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 610.859421] env[61648]: DEBUG oslo_concurrency.lockutils [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] Acquiring lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.859607] env[61648]: DEBUG oslo_concurrency.lockutils [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] Acquired lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.859794] env[61648]: DEBUG nova.network.neutron [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Refreshing network info cache for port 9aff1da9-b577-425f-993c-4076d53a7eb0 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.144096] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. 
[ 611.144096] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.144096] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.144096] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.144096] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.144096] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.144096] env[61648]: ERROR nova.compute.manager raise self.value [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.144096] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 611.144096] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.144096] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 611.144713] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.144713] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 611.144713] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. 
[ 611.144713] env[61648]: ERROR nova.compute.manager [ 611.144713] env[61648]: Traceback (most recent call last): [ 611.144713] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 611.144713] env[61648]: listener.cb(fileno) [ 611.144713] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.144713] env[61648]: result = function(*args, **kwargs) [ 611.144713] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.144713] env[61648]: return func(*args, **kwargs) [ 611.144713] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.144713] env[61648]: raise e [ 611.144713] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.144713] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 611.144713] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.144713] env[61648]: created_port_ids = self._update_ports_for_instance( [ 611.144713] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.144713] env[61648]: with excutils.save_and_reraise_exception(): [ 611.144713] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.144713] env[61648]: self.force_reraise() [ 611.144713] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.144713] env[61648]: raise self.value [ 611.144713] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.144713] env[61648]: updated_port = self._update_port( [ 611.144713] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.144713] env[61648]: _ensure_no_port_binding_failure(port) [ 611.144713] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.144713] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 611.145488] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. [ 611.145488] env[61648]: Removing descriptor: 16 [ 611.145488] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. 
[ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Traceback (most recent call last): [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] yield resources [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.driver.spawn(context, instance, image_meta, [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.145488] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] vm_ref = self.build_virtual_machine(instance, [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] for vif in network_info: [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self._sync_wrapper(fn, *args, **kwargs) [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.wait() [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self[:] = self._gt.wait() [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self._exit_event.wait() [ 611.145785] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 611.146098] env[61648]: ERROR 
nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] result = hub.switch() [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self.greenlet.switch() [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] result = function(*args, **kwargs) [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return func(*args, **kwargs) [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise e [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] nwinfo = self.network_api.allocate_for_instance( [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.146098] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] created_port_ids = self._update_ports_for_instance( [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] with excutils.save_and_reraise_exception(): [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.force_reraise() [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise self.value [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] updated_port = self._update_port( [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.146409] 
env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] _ensure_no_port_binding_failure(port) [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.146409] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise exception.PortBindingFailed(port_id=port['id']) [ 611.146705] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. [ 611.146705] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] [ 611.146705] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Terminating instance [ 611.149570] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.150323] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.151314] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 611.154898] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.785s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.159018] env[61648]: INFO nova.compute.claims [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.394715] env[61648]: DEBUG nova.network.neutron [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.544822] env[61648]: DEBUG nova.network.neutron [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.664748] env[61648]: DEBUG nova.compute.utils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.667643] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 611.667643] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.772202] env[61648]: DEBUG nova.policy [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1679b8e397bd4dffab66422c61d72078', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61cc7ac999324335bf3a0bb3d84f7f4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 612.050298] env[61648]: DEBUG oslo_concurrency.lockutils [req-da135bd2-1cb2-46a2-8334-c9035c9a0ae7 req-430f29bc-f215-4682-975b-2114b883e871 service nova] Releasing lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.051218] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.052097] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.168032] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Start building block 
device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 612.239994] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Successfully created port: 2e4836a2-279b-447b-bb49-e1d15644d2bc {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.262791] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "14854fd0-680a-48a2-b1d6-50e75624aef2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.263142] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "14854fd0-680a-48a2-b1d6-50e75624aef2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.581986] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.617933] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd77184-8807-4ee6-a64e-ad799d0ebcde {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.626186] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad396d3-d465-43b8-a625-3ef90208a86b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.662198] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8bb4a0-2de9-4c8c-a9ee-12a0f9228152 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.669691] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f553f8-4699-41ec-9a37-6cd144bb1d1d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.686888] env[61648]: DEBUG nova.compute.provider_tree [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.715641] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] 
Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.135948] env[61648]: DEBUG nova.compute.manager [req-8d4e5bbb-fa2e-4bc3-ada5-217f6b08d4a7 req-a3e0283d-2980-43c6-8178-f246589f0369 service nova] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Received event network-vif-deleted-9aff1da9-b577-425f-993c-4076d53a7eb0 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 613.190151] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 613.192968] env[61648]: DEBUG nova.scheduler.client.report [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 613.218184] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.218939] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 613.219349] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.222306] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a0c5380-4a43-4323-86b0-72154b62b29e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.230680] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.230905] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.231120] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.231443] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.231695] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.231947] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.232378] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.232378] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.232546] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.232708] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.232876] env[61648]: DEBUG nova.virt.hardware [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.235762] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b847e251-0f11-43d1-bbe1-86cf57d67315 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.246788] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19df7024-0026-4770-bec3-59ec5c937169 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.254904] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af721abc-1fbb-4628-87d7-d048d1fe8473 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.262197] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d1713c19-45cc-4d33-8b23-a9516bbaa25f could not be found. [ 613.262420] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.262597] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 613.262838] env[61648]: DEBUG oslo.service.loopingcall [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.263685] env[61648]: DEBUG nova.compute.manager [-] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.263792] env[61648]: DEBUG nova.network.neutron [-] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.299440] env[61648]: DEBUG nova.network.neutron [-] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.528628] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 613.528628] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.528628] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.528628] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.528628] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.528628] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.528628] env[61648]: ERROR nova.compute.manager raise self.value [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.528628] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.528628] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.528628] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.529135] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.529135] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.529135] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 613.529135] env[61648]: ERROR nova.compute.manager [ 613.529135] env[61648]: Traceback (most recent call last): [ 613.529135] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.529135] env[61648]: listener.cb(fileno) [ 613.529135] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.529135] env[61648]: result = function(*args, **kwargs) [ 613.529135] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.529135] env[61648]: return func(*args, **kwargs) [ 613.529135] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.529135] env[61648]: raise e [ 613.529135] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.529135] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 613.529135] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.529135] env[61648]: created_port_ids = self._update_ports_for_instance( [ 613.529135] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.529135] env[61648]: with excutils.save_and_reraise_exception(): [ 613.529135] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.529135] env[61648]: self.force_reraise() [ 613.529135] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.529135] env[61648]: raise self.value [ 613.529135] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.529135] env[61648]: updated_port = self._update_port( [ 613.529135] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.529135] env[61648]: _ensure_no_port_binding_failure(port) [ 613.529135] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.529135] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.529893] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 613.529893] env[61648]: Removing descriptor: 16 [ 613.530766] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. 
[ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Traceback (most recent call last): [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] yield resources [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.driver.spawn(context, instance, image_meta, [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] vm_ref = self.build_virtual_machine(instance, [ 613.530766] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] for vif in network_info: [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self._sync_wrapper(fn, *args, **kwargs) [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.wait() [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self[:] = self._gt.wait() [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self._exit_event.wait() [ 613.531204] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.531204] env[61648]: ERROR 
nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] result = hub.switch() [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self.greenlet.switch() [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] result = function(*args, **kwargs) [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return func(*args, **kwargs) [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise e [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] nwinfo = self.network_api.allocate_for_instance( [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] created_port_ids = self._update_ports_for_instance( [ 613.531559] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] with excutils.save_and_reraise_exception(): [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.force_reraise() [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise self.value [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] updated_port = self._update_port( [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.531973] 
env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] _ensure_no_port_binding_failure(port) [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise exception.PortBindingFailed(port_id=port['id']) [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 613.531973] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] [ 613.532341] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Terminating instance [ 613.534053] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.534053] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.534053] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.698631] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.699155] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 613.701803] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.733s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.703537] env[61648]: INFO nova.compute.claims [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.802439] env[61648]: DEBUG nova.network.neutron [-] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.060339] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.191254] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.210466] env[61648]: DEBUG nova.compute.utils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 614.212149] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 614.212319] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.262427] env[61648]: DEBUG nova.policy [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75788746b2214f2e8c1a8884c89ddb9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd94e7e89f424d34920f0fa92acf3226', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 614.305280] env[61648]: INFO nova.compute.manager [-] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Took 1.04 seconds to deallocate network for instance. [ 614.307660] env[61648]: DEBUG nova.compute.claims [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 614.307843] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.458676] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "b9130bac-f92b-4208-b84c-852f4a269153" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.458921] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "b9130bac-f92b-4208-b84c-852f4a269153" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.624755] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Successfully created port: e219b76e-a60a-4f74-977c-d18997d31538 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.696694] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 
tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.697142] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.697336] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.697641] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c286a8f-c5a3-4440-803d-487819624d5f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.710997] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbb3973-2758-4231-9eba-56df02c712f7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.721977] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 614.741668] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87971b67-572c-4d5f-99b7-dab08aea10c1 could not be found. [ 614.742050] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.742139] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 614.742758] env[61648]: DEBUG oslo.service.loopingcall [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.742758] env[61648]: DEBUG nova.compute.manager [-] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.742758] env[61648]: DEBUG nova.network.neutron [-] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.775289] env[61648]: DEBUG nova.network.neutron [-] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.161246] env[61648]: DEBUG nova.compute.manager [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Received event network-changed-2e4836a2-279b-447b-bb49-e1d15644d2bc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.161445] env[61648]: DEBUG nova.compute.manager [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Refreshing instance network info cache due to event network-changed-2e4836a2-279b-447b-bb49-e1d15644d2bc. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 615.161717] env[61648]: DEBUG oslo_concurrency.lockutils [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] Acquiring lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.162045] env[61648]: DEBUG oslo_concurrency.lockutils [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] Acquired lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.162273] env[61648]: DEBUG nova.network.neutron [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Refreshing network info cache for port 2e4836a2-279b-447b-bb49-e1d15644d2bc {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.264103] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5d2e9a-4970-482f-8cbe-2a3ce9b4a24f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.272020] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30bed23-ec13-4132-940c-56468e4c3eaa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.277471] env[61648]: DEBUG nova.network.neutron [-] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.306130] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d7096c-bfc5-4e6b-bb5c-5fccb3a6fbe4 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.314619] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad38f0f9-fc6a-4cb5-9540-b4118abef561 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.333244] env[61648]: DEBUG nova.compute.provider_tree [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.676730] env[61648]: ERROR nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. [ 615.676730] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.676730] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 615.676730] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 615.676730] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.676730] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.676730] env[61648]: ERROR nova.compute.manager raise self.value [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 615.676730] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 615.676730] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.676730] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 615.677250] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.677250] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 615.677250] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. 
[ 615.677250] env[61648]: ERROR nova.compute.manager [ 615.677250] env[61648]: Traceback (most recent call last): [ 615.677250] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 615.677250] env[61648]: listener.cb(fileno) [ 615.677250] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.677250] env[61648]: result = function(*args, **kwargs) [ 615.677250] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 615.677250] env[61648]: return func(*args, **kwargs) [ 615.677250] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.677250] env[61648]: raise e [ 615.677250] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.677250] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 615.677250] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 615.677250] env[61648]: created_port_ids = self._update_ports_for_instance( [ 615.677250] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 615.677250] env[61648]: with excutils.save_and_reraise_exception(): [ 615.677250] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.677250] env[61648]: self.force_reraise() [ 615.677250] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.677250] env[61648]: raise self.value [ 615.677250] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 615.677250] env[61648]: updated_port = self._update_port( [ 615.677250] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.677250] env[61648]: _ensure_no_port_binding_failure(port) [ 615.677250] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.677250] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 615.678744] env[61648]: nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. [ 615.678744] env[61648]: Removing descriptor: 16 [ 615.681879] env[61648]: DEBUG nova.network.neutron [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.734260] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 615.761645] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 615.761893] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 615.762064] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.763891] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 615.763891] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.763891] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 615.763891] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 615.763891] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 615.764191] env[61648]: DEBUG 
nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 615.764191] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 615.764191] env[61648]: DEBUG nova.virt.hardware [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 615.764403] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5f9c92-b93a-478e-a0cb-3e639bc07e16 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.767810] env[61648]: DEBUG nova.network.neutron [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.774144] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198b0845-19e8-46f0-9d33-61ff5d3684b9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.779185] env[61648]: INFO nova.compute.manager [-] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Took 1.04 seconds to deallocate network for instance. [ 615.788719] env[61648]: ERROR nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. 
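The nova.virt.hardware lines above walk from the 1-vCPU m1.nano flavor to a single possible topology of 1 socket x 1 core x 1 thread. A minimal sketch of that enumeration, assuming the only constraints are the per-dimension ceilings shown in the log (65536 each) and that sockets * cores * threads must equal the vCPU count; `VirtCPUTopology` here is a local stand-in, not Nova's object.

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate every (sockets, cores, threads) split whose product is the
    # requested vCPU count and which stays within the per-dimension ceilings.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# A 1-vCPU flavor admits exactly one topology, matching the log above.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```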
[ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Traceback (most recent call last): [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] yield resources [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.driver.spawn(context, instance, image_meta, [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] vm_ref = self.build_virtual_machine(instance, [ 615.788719] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] vif_infos = vmwarevif.get_vif_info(self._session, [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] for vif in network_info: [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return self._sync_wrapper(fn, *args, **kwargs) [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.wait() [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self[:] = self._gt.wait() [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return self._exit_event.wait() [ 615.789097] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 615.789097] env[61648]: ERROR 
nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] current.throw(*self._exc) [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] result = function(*args, **kwargs) [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return func(*args, **kwargs) [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise e [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] nwinfo = self.network_api.allocate_for_instance( [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] created_port_ids = self._update_ports_for_instance( [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] with excutils.save_and_reraise_exception(): [ 615.789395] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.force_reraise() [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise self.value [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] updated_port = self._update_port( [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] _ensure_no_port_binding_failure(port) [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise exception.PortBindingFailed(port_id=port['id']) [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. [ 615.789736] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] [ 615.789736] env[61648]: INFO nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Terminating instance [ 615.790812] env[61648]: DEBUG nova.compute.claims [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.790980] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.797685] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.797843] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.798012] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.836291] env[61648]: DEBUG nova.scheduler.client.report [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.271033] env[61648]: DEBUG oslo_concurrency.lockutils [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 
service nova] Releasing lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.271373] env[61648]: DEBUG nova.compute.manager [req-920653f9-d53f-4c89-a428-2ca1a4683ae3 req-358572f2-0dcd-48cb-896d-2bb9e713be67 service nova] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Received event network-vif-deleted-2e4836a2-279b-447b-bb49-e1d15644d2bc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.318420] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.341204] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.341736] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 616.344644] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.485s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.428987] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.850037] env[61648]: DEBUG nova.compute.utils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.855857] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 616.856036] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.902320] env[61648]: DEBUG nova.policy [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '843ba5059eae4281a9d7cf3a302b3f14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '513f01bb7fdc4179bec39c058a9b8b00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.931404] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.931818] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 616.932018] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 616.932742] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bcdd0fd-0933-4979-ae3a-0bca2fc37ae5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.942220] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c49678-c3ee-4930-bc99-c38f9e85e2e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.967829] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3cd90969-d884-49fc-a2c3-8501e2c51ff6 could not be found. 
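The lockutils messages in this stretch (acquiring, acquired, released, with the time waited and held) come from oslo.concurrency's named-lock helpers. A small usage sketch against the real `oslo_concurrency.lockutils` API; the lock names mirror the "refresh_cache-<uuid>" and "compute_resources" locks in the log, and the function bodies are invented placeholders.

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = "3cd90969-d884-49fc-a2c3-8501e2c51ff6"

def refresh_network_cache(instance_uuid):
    # Serialize refreshes of one instance's network info cache; the lock name
    # follows the "refresh_cache-<uuid>" pattern seen above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # ... rebuild the instance_info_cache here (omitted)

# The decorator form produces the same acquired/released debug lines, with
# the wait/held timings, from lockutils' inner wrapper.
@lockutils.synchronized("compute_resources")
def abort_instance_claim():
    pass  # ... return claimed resources to the tracker (omitted)

refresh_network_cache(INSTANCE_UUID)
abort_instance_claim()
```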
[ 616.968089] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 616.968275] env[61648]: INFO nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 616.968515] env[61648]: DEBUG oslo.service.loopingcall [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 616.971482] env[61648]: DEBUG nova.compute.manager [-] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 616.971577] env[61648]: DEBUG nova.network.neutron [-] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.990114] env[61648]: DEBUG nova.network.neutron [-] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.195146] env[61648]: DEBUG nova.compute.manager [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Received event network-changed-e219b76e-a60a-4f74-977c-d18997d31538 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 617.195146] env[61648]: DEBUG nova.compute.manager [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Refreshing instance network info cache due to event network-changed-e219b76e-a60a-4f74-977c-d18997d31538. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 617.195146] env[61648]: DEBUG oslo_concurrency.lockutils [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] Acquiring lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.195146] env[61648]: DEBUG oslo_concurrency.lockutils [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] Acquired lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.195523] env[61648]: DEBUG nova.network.neutron [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Refreshing network info cache for port e219b76e-a60a-4f74-977c-d18997d31538 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.199920] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Successfully created port: a13ef10b-4efa-4047-9b34-ae3bc97e857b {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.267104] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaba14e9-a7ee-47e1-978a-96c1064e21f1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.276624] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3652b15b-a5bc-418c-a2c4-ba7f8cf3d309 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.310804] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382e016e-8b1c-439f-8780-79d8311ca2df {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.319571] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b61ae79-55e9-4a5b-ab18-cc198cf55660 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.335906] env[61648]: DEBUG nova.compute.provider_tree [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.353723] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 617.496484] env[61648]: DEBUG nova.network.neutron [-] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.719751] env[61648]: DEBUG nova.network.neutron [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.817239] env[61648]: DEBUG nova.network.neutron [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.840058] env[61648]: DEBUG nova.scheduler.client.report [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.994414] env[61648]: DEBUG nova.compute.manager [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Received event network-changed-a13ef10b-4efa-4047-9b34-ae3bc97e857b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 617.994615] env[61648]: DEBUG nova.compute.manager [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Refreshing instance network info cache due to event network-changed-a13ef10b-4efa-4047-9b34-ae3bc97e857b. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 617.994890] env[61648]: DEBUG oslo_concurrency.lockutils [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] Acquiring lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.995708] env[61648]: DEBUG oslo_concurrency.lockutils [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] Acquired lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.995974] env[61648]: DEBUG nova.network.neutron [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Refreshing network info cache for port a13ef10b-4efa-4047-9b34-ae3bc97e857b {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.999339] env[61648]: INFO nova.compute.manager [-] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Took 1.03 seconds to deallocate network for instance. [ 618.002278] env[61648]: DEBUG nova.compute.claims [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 618.002519] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.191605] env[61648]: ERROR nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. 
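The scheduler report lines above repeat the provider's inventory (VCPU, MEMORY_MB, DISK_GB, each with total, reserved, allocation_ratio and max_unit). As a rough reading of those numbers, and assuming the usual placement formula of capacity = (total - reserved) * allocation_ratio with max_unit bounding a single allocation, the sketch below just redoes that arithmetic for the inventory shown.

```python
# Recompute schedulable capacity from the inventory dict logged above.
# Assumed formula: capacity = (total - reserved) * allocation_ratio,
# with max_unit capping what one instance may consume.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16,
             "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530,
                  "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 155,
                "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}, per-instance cap={inv['max_unit']}")
# VCPU: capacity=192, per-instance cap=16
# MEMORY_MB: capacity=196078, per-instance cap=65530
# DISK_GB: capacity=400, per-instance cap=155
```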
[ 618.191605] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.191605] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.191605] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.191605] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.191605] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.191605] env[61648]: ERROR nova.compute.manager raise self.value [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.191605] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.191605] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.191605] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.192065] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.192065] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.192065] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. 
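The frames through oslo_utils/excutils.py in the traceback above are `save_and_reraise_exception`, a real oslo.utils context manager: used inside an except block, it lets cleanup run while an exception is in flight and then re-raises the original error. A minimal usage sketch; the failing Neutron call is a stand-in.

```python
from oslo_utils import excutils

def update_port(port_id):
    # Stand-in for the Neutron port update that fails in the log.
    raise RuntimeError(f"binding failed for {port_id}")

def update_ports_for_instance(port_ids):
    created = []
    for port_id in port_ids:
        try:
            update_port(port_id)
            created.append(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Roll back whatever was created so far; the original
                # exception is re-raised automatically when this block exits.
                created.clear()

try:
    update_ports_for_instance(["a13ef10b-4efa-4047-9b34-ae3bc97e857b"])
except RuntimeError as exc:
    print("re-raised:", exc)
```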
[ 618.192065] env[61648]: ERROR nova.compute.manager [ 618.192065] env[61648]: Traceback (most recent call last): [ 618.192065] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.192065] env[61648]: listener.cb(fileno) [ 618.192065] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.192065] env[61648]: result = function(*args, **kwargs) [ 618.192065] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.192065] env[61648]: return func(*args, **kwargs) [ 618.192065] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.192065] env[61648]: raise e [ 618.192065] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.192065] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 618.192065] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.192065] env[61648]: created_port_ids = self._update_ports_for_instance( [ 618.192065] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.192065] env[61648]: with excutils.save_and_reraise_exception(): [ 618.192065] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.192065] env[61648]: self.force_reraise() [ 618.192065] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.192065] env[61648]: raise self.value [ 618.192065] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.192065] env[61648]: updated_port = self._update_port( [ 618.192065] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.192065] env[61648]: _ensure_no_port_binding_failure(port) [ 618.192065] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.192065] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.192814] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. 
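The bare traceback above starts in eventlet's hub and greenthread machinery: the network allocation runs in a separate greenthread, and the build path only sees the PortBindingFailed when it later waits on that thread's result. A rough sketch of that pattern using eventlet's public API; the allocation function is a stand-in.

```python
import eventlet

def allocate_for_instance(port_id):
    # Stand-in for the Neutron allocation that fails in the log.
    raise RuntimeError(f"Binding failed for port {port_id}")

# Kick the allocation off in the background ...
gt = eventlet.spawn(allocate_for_instance,
                    "a13ef10b-4efa-4047-9b34-ae3bc97e857b")

# ... keep building the instance, then block on the result when it is needed.
# GreenThread.wait() re-raises whatever the greenthread raised, which is why
# the spawn path in the log only surfaces the failure at this point.
try:
    network_info = gt.wait()
except RuntimeError as exc:
    print("network setup failed:", exc)
```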
[ 618.192814] env[61648]: Removing descriptor: 19 [ 618.320093] env[61648]: DEBUG oslo_concurrency.lockutils [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] Releasing lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.320407] env[61648]: DEBUG nova.compute.manager [req-d94c8672-8c2b-4a3c-a24d-4597a5c7fe10 req-b3d1ab7a-1d0d-4e2c-b19e-6eafed97e7d5 service nova] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Received event network-vif-deleted-e219b76e-a60a-4f74-977c-d18997d31538 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.349259] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.005s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.349873] env[61648]: ERROR nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Traceback (most recent call last): [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.driver.spawn(context, instance, image_meta, [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] vm_ref = self.build_virtual_machine(instance, [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.349873] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] for vif in network_info: [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 612, in 
__iter__ [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self._sync_wrapper(fn, *args, **kwargs) [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.wait() [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self[:] = self._gt.wait() [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self._exit_event.wait() [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] result = hub.switch() [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.351214] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return self.greenlet.switch() [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] result = function(*args, **kwargs) [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] return func(*args, **kwargs) [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise e [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] nwinfo = self.network_api.allocate_for_instance( [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] created_port_ids = self._update_ports_for_instance( [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.351565] 
env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] with excutils.save_and_reraise_exception(): [ 618.351565] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] self.force_reraise() [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise self.value [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] updated_port = self._update_port( [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] _ensure_no_port_binding_failure(port) [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] raise exception.PortBindingFailed(port_id=port['id']) [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] nova.exception.PortBindingFailed: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. [ 618.351925] env[61648]: ERROR nova.compute.manager [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] [ 618.352286] env[61648]: DEBUG nova.compute.utils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 618.352286] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.914s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.356019] env[61648]: INFO nova.compute.claims [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.361663] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 618.364140] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Build of instance 16e92598-3eba-42c1-b9ec-3b2b91231267 was re-scheduled: Binding failed for port c6388b06-4edc-443f-83b4-6856374e284f, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 618.364614] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 618.364856] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.364997] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquired lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.365171] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.414295] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.416017] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.416017] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.416017] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.416017] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.416017] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.416231] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.416231] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.416231] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.416231] env[61648]: DEBUG nova.virt.hardware [None 
req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.416342] env[61648]: DEBUG nova.virt.hardware [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.417506] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f503ed80-ad6e-46db-a4d9-519fedd2a5c3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.425990] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaaebbe-5fd2-4891-ba76-be9f37e3d711 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.443666] env[61648]: ERROR nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Traceback (most recent call last): [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] yield resources [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.driver.spawn(context, instance, image_meta, [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] vm_ref = self.build_virtual_machine(instance, [ 618.443666] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", 
line 119, in get_vif_info [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] for vif in network_info: [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return self._sync_wrapper(fn, *args, **kwargs) [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.wait() [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self[:] = self._gt.wait() [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return self._exit_event.wait() [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 618.444033] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] current.throw(*self._exc) [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] result = function(*args, **kwargs) [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return func(*args, **kwargs) [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise e [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] nwinfo = self.network_api.allocate_for_instance( [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] created_port_ids = self._update_ports_for_instance( [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.444475] env[61648]: ERROR 
nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] with excutils.save_and_reraise_exception(): [ 618.444475] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.force_reraise() [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise self.value [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] updated_port = self._update_port( [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] _ensure_no_port_binding_failure(port) [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise exception.PortBindingFailed(port_id=port['id']) [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. [ 618.444854] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] [ 618.444854] env[61648]: INFO nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Terminating instance [ 618.449286] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquiring lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.516567] env[61648]: DEBUG nova.network.neutron [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.608393] env[61648]: DEBUG nova.network.neutron [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.888866] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.982024] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.110655] env[61648]: DEBUG oslo_concurrency.lockutils [req-546cd063-08e8-417b-82e3-aeee89f615b3 req-22ff6c66-f424-427d-8751-5af5801024de service nova] Releasing lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.111088] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquired lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.111279] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.483346] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Releasing lock "refresh_cache-16e92598-3eba-42c1-b9ec-3b2b91231267" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.483665] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 619.483903] env[61648]: DEBUG nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 619.484131] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 619.513428] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.799455] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.817020] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec474074-c56b-4c5a-a04d-0e6045785a1e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.824430] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65085174-5e00-4fc9-bfa4-9661e204d485 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.859216] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338413f3-97e4-4b44-87c5-0730bacbe1dd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.868413] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c627e4-66ff-4404-bd7e-3349331935a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.887547] env[61648]: DEBUG nova.compute.provider_tree [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.925215] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.018454] env[61648]: DEBUG nova.network.neutron [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 
tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.050612] env[61648]: DEBUG nova.compute.manager [req-57e1c233-0c69-4bba-8655-a4513649c0b7 req-dbf4f803-0c4d-44df-b113-6e9b4e471666 service nova] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Received event network-vif-deleted-a13ef10b-4efa-4047-9b34-ae3bc97e857b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 620.390687] env[61648]: DEBUG nova.scheduler.client.report [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.427625] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Releasing lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.428074] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 620.428270] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.428545] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e5a63c4-69ce-4823-8b5b-963a65a0f5b6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.437448] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f10ac2-62aa-40bd-a573-bf15f197da23 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.459532] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49284c45-b77b-4992-8437-d9d31ba12539 could not be found. 
[ 620.459742] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.459886] env[61648]: INFO nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Took 0.03 seconds to destroy the instance on the hypervisor. [ 620.460137] env[61648]: DEBUG oslo.service.loopingcall [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.460360] env[61648]: DEBUG nova.compute.manager [-] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.460454] env[61648]: DEBUG nova.network.neutron [-] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.477267] env[61648]: DEBUG nova.network.neutron [-] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.520327] env[61648]: INFO nova.compute.manager [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: 16e92598-3eba-42c1-b9ec-3b2b91231267] Took 1.04 seconds to deallocate network for instance. [ 620.896327] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.896850] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 620.904727] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.476s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.910999] env[61648]: INFO nova.compute.claims [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.979371] env[61648]: DEBUG nova.network.neutron [-] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.416939] env[61648]: DEBUG nova.compute.utils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.420168] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 621.420353] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.464824] env[61648]: DEBUG nova.policy [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3df0deac391742239ddb06b5a63fa3f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6eab3dc853f45a697c2009703f8393c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 621.482305] env[61648]: INFO nova.compute.manager [-] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Took 1.02 seconds to deallocate network for instance. 
[ 621.484685] env[61648]: DEBUG nova.compute.claims [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.485031] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.551863] env[61648]: INFO nova.scheduler.client.report [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Deleted allocations for instance 16e92598-3eba-42c1-b9ec-3b2b91231267 [ 621.789437] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Successfully created port: c58fb08c-955e-4c6c-a8d7-adb284aff803 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.921327] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 622.062420] env[61648]: DEBUG oslo_concurrency.lockutils [None req-83979e6b-e574-4e58-b84b-1a8c702ef313 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "16e92598-3eba-42c1-b9ec-3b2b91231267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.764s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.431106] env[61648]: INFO nova.virt.block_device [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Booting with volume 3bea0f2a-9c13-4466-8549-a80c70193b42 at /dev/sda [ 622.434744] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e40c5e-81ba-455a-905a-642e25fe9418 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.456577] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f2c39b-f2ab-4498-ad91-631db866d073 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.489258] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0664cca-7419-4a60-b444-51b7205b5cfb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.491790] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-8bb74bb4-6735-404c-a465-cd08a74babce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.499110] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfa74ae-9d81-4ab1-8c7a-00e4a03d1fde {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.505046] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a053e4ea-19e9-4eac-8d8b-05898902f5bc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.525549] env[61648]: DEBUG nova.compute.provider_tree [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.530487] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6ea9441-ca4e-4a46-9ee6-9679a78d3193 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.539845] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba642fc-c38e-4c23-998f-4e8b33fdf60a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.561472] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa6c043-e942-4f40-a2d5-842ec182452b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.567876] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec8ce57-db8c-4ad6-b1c5-72a7032afbfa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.570478] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 622.584582] env[61648]: DEBUG nova.virt.block_device [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating existing volume attachment record: dda4adc3-0586-4cb4-aa57-798b203f7c60 {{(pid=61648) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 622.758455] env[61648]: DEBUG nova.compute.manager [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Received event network-changed-c58fb08c-955e-4c6c-a8d7-adb284aff803 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.758650] env[61648]: DEBUG nova.compute.manager [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Refreshing instance network info cache due to event network-changed-c58fb08c-955e-4c6c-a8d7-adb284aff803. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 622.758892] env[61648]: DEBUG oslo_concurrency.lockutils [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] Acquiring lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.759094] env[61648]: DEBUG oslo_concurrency.lockutils [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] Acquired lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.759732] env[61648]: DEBUG nova.network.neutron [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Refreshing network info cache for port c58fb08c-955e-4c6c-a8d7-adb284aff803 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.920710] env[61648]: ERROR nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. 
[ 622.920710] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.920710] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.920710] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.920710] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.920710] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.920710] env[61648]: ERROR nova.compute.manager raise self.value [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.920710] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.920710] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.920710] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.922631] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.922631] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.922631] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. 
[ 622.922631] env[61648]: ERROR nova.compute.manager [ 622.922631] env[61648]: Traceback (most recent call last): [ 622.922631] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.922631] env[61648]: listener.cb(fileno) [ 622.922631] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.922631] env[61648]: result = function(*args, **kwargs) [ 622.922631] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.922631] env[61648]: return func(*args, **kwargs) [ 622.922631] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.922631] env[61648]: raise e [ 622.922631] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.922631] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 622.922631] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.922631] env[61648]: created_port_ids = self._update_ports_for_instance( [ 622.922631] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.922631] env[61648]: with excutils.save_and_reraise_exception(): [ 622.922631] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.922631] env[61648]: self.force_reraise() [ 622.922631] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.922631] env[61648]: raise self.value [ 622.922631] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.922631] env[61648]: updated_port = self._update_port( [ 622.922631] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.922631] env[61648]: _ensure_no_port_binding_failure(port) [ 622.922631] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.922631] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.924319] env[61648]: nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. 
[ 622.924319] env[61648]: Removing descriptor: 19 [ 623.031458] env[61648]: DEBUG nova.scheduler.client.report [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.090371] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.282093] env[61648]: DEBUG nova.network.neutron [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.397656] env[61648]: DEBUG nova.network.neutron [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.542019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.542019] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 623.544819] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.079s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.545047] env[61648]: DEBUG nova.objects.instance [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] [instance: e918b827-ea37-4589-8999-e363aba4492d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61648) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 623.571882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "d76d8aed-9126-4d21-9df9-6317c3b19f65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.571882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "d76d8aed-9126-4d21-9df9-6317c3b19f65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.900289] env[61648]: DEBUG oslo_concurrency.lockutils [req-ab6da35f-8c34-4f03-8664-296c62dba9ab req-3e71f0a5-6ce8-4372-ab16-3ae0605dc4c9 service nova] Releasing lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.052455] env[61648]: DEBUG nova.compute.utils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 624.057540] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 624.057723] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.120461] env[61648]: DEBUG nova.policy [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7ab7dc3a1884ed4a1593d4aebb04f91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14fe6742c14545e7a8bc358f01daa654', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 624.500241] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Successfully created port: 19f522ca-cb0e-4101-be1f-6aa522ceb6f7 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.563148] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 624.572024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a37f953f-8cbe-4e81-96ad-585a0c4df11a tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.572024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.254s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.572024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.572024] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 624.572024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.630s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.579503] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9f4f82-4dc9-44ff-9d1a-b1fb7cb012e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.594603] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249ef6bc-884f-43ae-8e48-1c560a20407b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.611082] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128cd6e0-2f10-4bd6-a296-fe64700b6bb2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.617898] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cf1ac2-7537-45cb-a4f0-643ab0b4f699 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.649186] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181454MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 624.649186] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.701574] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 624.702147] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 624.702360] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 624.702512] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.702768] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 624.703048] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.703219] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 624.703427] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 624.703585] env[61648]: DEBUG 
nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 624.703750] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 624.703910] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 624.704098] env[61648]: DEBUG nova.virt.hardware [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 624.704982] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9025e7-ead0-48b2-9c3c-558493756716 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.713227] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510dd45b-a9de-4485-b09c-a0d399302616 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.727080] env[61648]: ERROR nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. 
[ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Traceback (most recent call last): [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] yield resources [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.driver.spawn(context, instance, image_meta, [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] vm_ref = self.build_virtual_machine(instance, [ 624.727080] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] for vif in network_info: [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return self._sync_wrapper(fn, *args, **kwargs) [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.wait() [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self[:] = self._gt.wait() [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return self._exit_event.wait() [ 624.727458] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 624.727458] env[61648]: ERROR 
nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] current.throw(*self._exc) [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] result = function(*args, **kwargs) [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return func(*args, **kwargs) [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise e [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] nwinfo = self.network_api.allocate_for_instance( [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] created_port_ids = self._update_ports_for_instance( [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] with excutils.save_and_reraise_exception(): [ 624.727823] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.force_reraise() [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise self.value [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] updated_port = self._update_port( [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] _ensure_no_port_binding_failure(port) [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise exception.PortBindingFailed(port_id=port['id']) [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. [ 624.728232] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] [ 624.728232] env[61648]: INFO nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Terminating instance [ 624.729481] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquiring lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.729636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquired lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.729832] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.786104] env[61648]: DEBUG nova.compute.manager [req-9a757e23-21b5-4396-adb3-2d932aff39b7 req-5dc531f5-45b1-45bb-b239-1a60e7768e4f service nova] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Received event network-vif-deleted-c58fb08c-955e-4c6c-a8d7-adb284aff803 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 625.260771] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.399887] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.486249] env[61648]: ERROR nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. 
[ 625.486249] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.486249] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.486249] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.486249] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.486249] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.486249] env[61648]: ERROR nova.compute.manager raise self.value [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.486249] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 625.486249] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.486249] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 625.486774] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.486774] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 625.486774] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. 
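Both tracebacks above end in _ensure_no_port_binding_failure() in nova/network/neutron.py, which turns a port whose binding Neutron could not complete into the PortBindingFailed error seen here. A minimal, self-contained sketch of that check (not Nova's actual code; the "binding:vif_type" == "binding_failed" condition and the plain-dict port are assumptions for illustration):

    # Sketch of the binding check, assuming Neutron marks a failed binding
    # by setting the port's binding:vif_type to "binding_failed".
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # 'port' stands in for the dict-like body returned by Neutron's port API.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    try:
        ensure_no_port_binding_failure(
            {"id": "19f522ca-cb0e-4101-be1f-6aa522ceb6f7",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)

Because the failure is raised while the network-allocation greenthread is being waited on, the same exception surfaces twice in the log: once inside the instance's spawn traceback and once as the bare eventlet traceback that follows.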
[ 625.486774] env[61648]: ERROR nova.compute.manager [ 625.486774] env[61648]: Traceback (most recent call last): [ 625.486774] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 625.486774] env[61648]: listener.cb(fileno) [ 625.486774] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.486774] env[61648]: result = function(*args, **kwargs) [ 625.486774] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.486774] env[61648]: return func(*args, **kwargs) [ 625.486774] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.486774] env[61648]: raise e [ 625.486774] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.486774] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 625.486774] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.486774] env[61648]: created_port_ids = self._update_ports_for_instance( [ 625.486774] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.486774] env[61648]: with excutils.save_and_reraise_exception(): [ 625.486774] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.486774] env[61648]: self.force_reraise() [ 625.486774] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.486774] env[61648]: raise self.value [ 625.486774] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.486774] env[61648]: updated_port = self._update_port( [ 625.486774] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.486774] env[61648]: _ensure_no_port_binding_failure(port) [ 625.486774] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.486774] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 625.487573] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. [ 625.487573] env[61648]: Removing descriptor: 19 [ 625.566728] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706b8601-2e52-4f00-bd82-5d624f52233c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.575038] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d7a14-f824-4265-a34f-b4b0586257af {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.579768] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 625.611701] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10cedf8-f7a7-4056-b52f-d32aaf879925 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.618850] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aa0a4e-b24c-4212-8b27-3b364fc19235 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.624853] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 625.625112] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 625.625271] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.625450] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 625.625595] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.625747] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 625.626086] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 625.626265] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 625.626444] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 625.626602] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 625.626761] env[61648]: DEBUG nova.virt.hardware [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 625.627503] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb2d9d2-3735-4245-a2fd-08c944566f58 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.639424] env[61648]: DEBUG nova.compute.provider_tree [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.644507] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01300b71-8345-4c25-ab63-932854f66868 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.660408] env[61648]: ERROR nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. 
[ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Traceback (most recent call last): [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] yield resources [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.driver.spawn(context, instance, image_meta, [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] vm_ref = self.build_virtual_machine(instance, [ 625.660408] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] for vif in network_info: [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return self._sync_wrapper(fn, *args, **kwargs) [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.wait() [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self[:] = self._gt.wait() [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return self._exit_event.wait() [ 625.660910] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 625.660910] env[61648]: ERROR 
nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] current.throw(*self._exc) [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] result = function(*args, **kwargs) [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return func(*args, **kwargs) [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise e [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] nwinfo = self.network_api.allocate_for_instance( [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] created_port_ids = self._update_ports_for_instance( [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] with excutils.save_and_reraise_exception(): [ 625.661286] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.force_reraise() [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise self.value [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] updated_port = self._update_port( [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] _ensure_no_port_binding_failure(port) [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise exception.PortBindingFailed(port_id=port['id']) [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. [ 625.661608] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] [ 625.661608] env[61648]: INFO nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Terminating instance [ 625.663104] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquiring lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.663268] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquired lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.663430] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.901419] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Releasing lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.901981] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 625.902314] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42cd2365-f050-4c53-be18-b5b657fd987c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.911950] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff79389-fe6b-4d84-9b0a-82120f8f3e3f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.935381] env[61648]: WARNING nova.virt.vmwareapi.driver [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance does not exists. 
Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance baf240b6-0a42-485f-9176-78dda5de3c7d could not be found. [ 625.935619] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 625.936385] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bc35b93-67b6-49c8-bf0e-5732a39096e4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.945190] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd40491-ff2a-4905-8744-a9020c731969 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.966934] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance baf240b6-0a42-485f-9176-78dda5de3c7d could not be found. [ 625.967257] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.967501] env[61648]: INFO nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Took 0.07 seconds to destroy the instance on the hypervisor. [ 625.967802] env[61648]: DEBUG oslo.service.loopingcall [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.968084] env[61648]: DEBUG nova.compute.manager [-] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 625.968249] env[61648]: DEBUG nova.network.neutron [-] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.993889] env[61648]: DEBUG nova.network.neutron [-] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.145214] env[61648]: DEBUG nova.scheduler.client.report [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.186672] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.279639] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.496696] env[61648]: DEBUG nova.network.neutron [-] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.653016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.079s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.653016] env[61648]: ERROR nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. 
[ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Traceback (most recent call last): [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.driver.spawn(context, instance, image_meta, [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.653016] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] vm_ref = self.build_virtual_machine(instance, [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] for vif in network_info: [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self._sync_wrapper(fn, *args, **kwargs) [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.wait() [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self[:] = self._gt.wait() [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self._exit_event.wait() [ 626.653512] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] result = hub.switch() [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return self.greenlet.switch() [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] result = function(*args, **kwargs) [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] return func(*args, **kwargs) [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise e [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] nwinfo = self.network_api.allocate_for_instance( [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.653800] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] created_port_ids = self._update_ports_for_instance( [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] with excutils.save_and_reraise_exception(): [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] self.force_reraise() [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise self.value [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] updated_port = self._update_port( [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] _ensure_no_port_binding_failure(port) [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 626.654102] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] raise exception.PortBindingFailed(port_id=port['id']) [ 626.654367] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] nova.exception.PortBindingFailed: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. [ 626.654367] env[61648]: ERROR nova.compute.manager [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] [ 626.654367] env[61648]: DEBUG nova.compute.utils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 626.654367] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Build of instance 25168ea9-24fb-4f63-b508-b5c3a47a77e4 was re-scheduled: Binding failed for port 960e68d2-2f43-4240-b0d5-ff4a2af28a6c, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 626.654367] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 626.654519] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquiring lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.654629] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Acquired lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.654729] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.655937] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.654s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.656195] env[61648]: DEBUG nova.objects.instance [None 
req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lazy-loading 'resources' on Instance uuid e918b827-ea37-4589-8999-e363aba4492d {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 626.784207] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Releasing lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.784661] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 626.785047] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 626.785363] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef2e2828-d38a-4baf-82ff-a87a036bab91 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.795458] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cede36f1-ed6d-48e2-822c-6b42cefe686a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.817647] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8 could not be found. [ 626.818056] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 626.818316] env[61648]: INFO nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 626.818797] env[61648]: DEBUG oslo.service.loopingcall [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.820167] env[61648]: DEBUG nova.compute.manager [-] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.820320] env[61648]: DEBUG nova.network.neutron [-] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.822696] env[61648]: DEBUG nova.compute.manager [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Received event network-changed-19f522ca-cb0e-4101-be1f-6aa522ceb6f7 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 626.822931] env[61648]: DEBUG nova.compute.manager [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Refreshing instance network info cache due to event network-changed-19f522ca-cb0e-4101-be1f-6aa522ceb6f7. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 626.823193] env[61648]: DEBUG oslo_concurrency.lockutils [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] Acquiring lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.823369] env[61648]: DEBUG oslo_concurrency.lockutils [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] Acquired lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.823560] env[61648]: DEBUG nova.network.neutron [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Refreshing network info cache for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.838607] env[61648]: DEBUG nova.network.neutron [-] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.000822] env[61648]: INFO nova.compute.manager [-] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Took 1.03 seconds to deallocate network for instance. [ 627.179431] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.236606] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.340623] env[61648]: DEBUG nova.network.neutron [-] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.357601] env[61648]: DEBUG nova.network.neutron [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.459962] env[61648]: DEBUG nova.network.neutron [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.566943] env[61648]: INFO nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Took 0.57 seconds to detach 1 volumes for instance. [ 627.570527] env[61648]: DEBUG nova.compute.claims [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 627.570695] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.677305] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35cf9f9-8489-4456-b5f5-9db343297e9d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.684898] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d06cfa1-5918-4cb2-8608-593074bb3a73 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.715685] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b0576f-b48c-4718-ab0f-6ff326668fc0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.723130] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f99c080-b7c4-494b-8488-9ab6e2c54109 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
627.736286] env[61648]: DEBUG nova.compute.provider_tree [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.740458] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Releasing lock "refresh_cache-25168ea9-24fb-4f63-b508-b5c3a47a77e4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.740667] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 627.740829] env[61648]: DEBUG nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.740993] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.757510] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.843175] env[61648]: INFO nova.compute.manager [-] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Took 1.02 seconds to deallocate network for instance. 
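When triaging a capture like this, it can help to list which instances hit PortBindingFailed and on which ports instead of scanning the tracebacks by hand. A small helper along these lines works against the record format shown above (the nova-compute.log filename and the regular expression are assumptions; adjust them to the actual file):

    import re
    from collections import defaultdict

    # Matches records such as:
    #   ... [instance: <uuid>] ... Binding failed for port <uuid>, please check ...
    PORT_RE = re.compile(
        r"\[instance: (?P<instance>[0-9a-f-]{36})\].*?"
        r"Binding failed for port (?P<port>[0-9a-f-]{36})")

    def failed_bindings(path="nova-compute.log"):
        # Map instance UUID -> set of ports whose binding failed.
        failures = defaultdict(set)
        with open(path) as fh:
            for line in fh:
                match = PORT_RE.search(line)
                if match:
                    failures[match.group("instance")].add(match.group("port"))
        return failures

    if __name__ == "__main__":
        for instance, ports in sorted(failed_bindings().items()):
            print(instance, "->", ", ".join(sorted(ports)))

The helper assumes one log record per line; a capture that has been wrapped, as this one is, may need its records rejoined first.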
[ 627.846848] env[61648]: DEBUG nova.compute.claims [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 627.846848] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.965631] env[61648]: DEBUG oslo_concurrency.lockutils [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] Releasing lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.965947] env[61648]: DEBUG nova.compute.manager [req-bafb3c19-27cc-46a3-b633-4da1e6a3929f req-50093b8e-472c-4985-9268-6016ac140fa9 service nova] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Received event network-vif-deleted-19f522ca-cb0e-4101-be1f-6aa522ceb6f7 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 628.240239] env[61648]: DEBUG nova.scheduler.client.report [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.260647] env[61648]: DEBUG nova.network.neutron [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.747404] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.752870] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.572s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.763136] env[61648]: INFO nova.compute.manager [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 
tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] [instance: 25168ea9-24fb-4f63-b508-b5c3a47a77e4] Took 1.02 seconds to deallocate network for instance. [ 628.776033] env[61648]: INFO nova.scheduler.client.report [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Deleted allocations for instance e918b827-ea37-4589-8999-e363aba4492d [ 629.285553] env[61648]: DEBUG oslo_concurrency.lockutils [None req-780bea60-72f0-49c8-aee4-676cc219dcba tempest-ServerShowV257Test-1897748483 tempest-ServerShowV257Test-1897748483-project-member] Lock "e918b827-ea37-4589-8999-e363aba4492d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.064s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.769451] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe969f2-35c7-468a-81ce-ddba6f0f2dbb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.777026] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d00c5a-700c-4bf7-9e09-9c41f0507e95 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.820282] env[61648]: INFO nova.scheduler.client.report [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Deleted allocations for instance 25168ea9-24fb-4f63-b508-b5c3a47a77e4 [ 629.825882] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f578efe7-fa90-41eb-92aa-e11b32a4abe1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.836018] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ab07a0-6651-4700-aedd-a49c072b1566 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.854558] env[61648]: DEBUG nova.compute.provider_tree [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.331870] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dc7c448f-0a94-4a35-8437-71690dbc1980 tempest-DeleteServersAdminTestJSON-1152706989 tempest-DeleteServersAdminTestJSON-1152706989-project-member] Lock "25168ea9-24fb-4f63-b508-b5c3a47a77e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.341s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.358172] env[61648]: DEBUG nova.scheduler.client.report [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.835969] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 630.863292] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.113s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.863971] env[61648]: ERROR nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Traceback (most recent call last): [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.driver.spawn(context, instance, image_meta, [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] vm_ref = self.build_virtual_machine(instance, [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] vif_infos = vmwarevif.get_vif_info(self._session, [ 630.863971] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] for vif in network_info: [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 630.864811] env[61648]: 
ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self._sync_wrapper(fn, *args, **kwargs) [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.wait() [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self[:] = self._gt.wait() [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self._exit_event.wait() [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] result = hub.switch() [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 630.864811] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return self.greenlet.switch() [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] result = function(*args, **kwargs) [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] return func(*args, **kwargs) [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise e [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] nwinfo = self.network_api.allocate_for_instance( [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] created_port_ids = self._update_ports_for_instance( [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 630.865393] env[61648]: ERROR nova.compute.manager 
[instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] with excutils.save_and_reraise_exception(): [ 630.865393] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] self.force_reraise() [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise self.value [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] updated_port = self._update_port( [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] _ensure_no_port_binding_failure(port) [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] raise exception.PortBindingFailed(port_id=port['id']) [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] nova.exception.PortBindingFailed: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. [ 630.866105] env[61648]: ERROR nova.compute.manager [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] [ 630.866380] env[61648]: DEBUG nova.compute.utils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 630.866660] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Build of instance d23a9ab2-01ed-4d41-b89f-445ecc5f410f was re-scheduled: Binding failed for port d69f405b-b65c-42c6-b5ce-eafc37eb1e8b, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 630.867107] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 630.867350] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.867499] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquired lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.867661] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.868690] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.108s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.871260] env[61648]: INFO nova.compute.claims [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.367284] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.403799] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.547660] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.049532] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Releasing lock "refresh_cache-d23a9ab2-01ed-4d41-b89f-445ecc5f410f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.049532] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 632.049532] env[61648]: DEBUG nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 632.049858] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 632.068772] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.412862] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3ef1e9-292c-4dff-8f55-58d10da82ce6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.424991] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d9f70d-bc68-44f2-9f5d-84bdb96d2b26 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.458117] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1573d5-9fc4-47df-ab75-05485bbcf288 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.465800] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3ce2e9-0a6c-4508-9951-ac5acc69862d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.483547] env[61648]: DEBUG nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.575368] env[61648]: DEBUG nova.network.neutron [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.988450] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.077898] env[61648]: INFO nova.compute.manager [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: d23a9ab2-01ed-4d41-b89f-445ecc5f410f] Took 1.03 seconds to deallocate network for instance. 
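The inventory payload repeated above is what placement uses to size this provider: for each resource class the schedulable capacity works out to (total - reserved) * allocation_ratio, consumed in min_unit/max_unit/step_size increments. A short worked example with the numbers from this log (a sketch of the arithmetic only, not placement code):

# Worked capacity arithmetic for the inventory dict logged above; this mirrors
# how placement derives schedulable capacity, it is not the placement source.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0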
[ 633.497369] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.497918] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 633.504555] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.197s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.006856] env[61648]: DEBUG nova.compute.utils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 634.009154] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 634.009357] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 634.109033] env[61648]: DEBUG nova.policy [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '213b71f9ead149d49a1b7c225fd18734', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceea39aefdd6484eb850a4d1b073cda6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 634.152466] env[61648]: INFO nova.scheduler.client.report [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Deleted allocations for instance d23a9ab2-01ed-4d41-b89f-445ecc5f410f [ 634.469994] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b8e06a-fa86-4982-bcaf-3b320128e3fd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.477911] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528f0491-975a-48e8-af60-f05306bfca9f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.517730] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 634.521981] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acb82e4-46b9-4e4f-af83-d9c9d8beb451 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.529685] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af0e43b-846f-40f2-b9ec-a708043332cb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.547196] env[61648]: DEBUG nova.compute.provider_tree [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.667782] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d0103739-78b5-422a-863c-267b3f657039 tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "d23a9ab2-01ed-4d41-b89f-445ecc5f410f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.930s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.987064] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Successfully created port: ca493fc8-9cca-4390-af7b-6371a56f9bc5 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.050701] env[61648]: DEBUG nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.171917] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 635.539745] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 635.556204] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.556839] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Traceback (most recent call last): [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.driver.spawn(context, instance, image_meta, [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] vm_ref = self.build_virtual_machine(instance, [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.556839] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] for vif in network_info: [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self._sync_wrapper(fn, *args, **kwargs) [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.wait() [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.557184] env[61648]: ERROR 
nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self[:] = self._gt.wait() [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self._exit_event.wait() [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] result = hub.switch() [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.557184] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return self.greenlet.switch() [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] result = function(*args, **kwargs) [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] return func(*args, **kwargs) [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise e [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] nwinfo = self.network_api.allocate_for_instance( [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] created_port_ids = self._update_ports_for_instance( [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] with excutils.save_and_reraise_exception(): [ 635.557490] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] self.force_reraise() [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise self.value [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] updated_port = self._update_port( [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] _ensure_no_port_binding_failure(port) [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] raise exception.PortBindingFailed(port_id=port['id']) [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] nova.exception.PortBindingFailed: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. [ 635.557796] env[61648]: ERROR nova.compute.manager [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] [ 635.558111] env[61648]: DEBUG nova.compute.utils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.560222] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Build of instance d1713c19-45cc-4d33-8b23-a9516bbaa25f was re-scheduled: Binding failed for port 9aff1da9-b577-425f-993c-4076d53a7eb0, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 635.560289] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 635.560488] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.560664] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.560827] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.563752] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.773s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.578477] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 635.578837] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 635.578934] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f 
tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.579043] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 635.579196] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.579341] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 635.579577] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 635.579751] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 635.580058] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 635.580793] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 635.580793] env[61648]: DEBUG nova.virt.hardware [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 635.581666] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36807aca-f249-44c9-af6a-672e75282301 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.590477] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626d943f-2b15-41c4-a81b-707602d672f7 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.709586] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.104064] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.281138] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.438386] env[61648]: DEBUG nova.compute.manager [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Received event network-changed-ca493fc8-9cca-4390-af7b-6371a56f9bc5 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 636.438386] env[61648]: DEBUG nova.compute.manager [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Refreshing instance network info cache due to event network-changed-ca493fc8-9cca-4390-af7b-6371a56f9bc5. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 636.439033] env[61648]: DEBUG oslo_concurrency.lockutils [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] Acquiring lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.439033] env[61648]: DEBUG oslo_concurrency.lockutils [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] Acquired lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.439033] env[61648]: DEBUG nova.network.neutron [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Refreshing network info cache for port ca493fc8-9cca-4390-af7b-6371a56f9bc5 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.533474] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquiring lock "bb9f6dc4-fd06-4cb5-984f-c938ed901772" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.533474] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "bb9f6dc4-fd06-4cb5-984f-c938ed901772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.549490] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b589cee-3377-4226-8893-bbac11c3b55f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.557669] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a032981-9df7-4b05-a9f8-77477c98b0c8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.597951] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431fa2bd-627c-4ae7-a522-644c70d35353 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.610333] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e843923f-3b38-4c04-8830-e0bc114de38d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.625795] env[61648]: DEBUG nova.compute.provider_tree [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.669516] env[61648]: ERROR nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. [ 636.669516] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.669516] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 636.669516] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 636.669516] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.669516] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.669516] env[61648]: ERROR nova.compute.manager raise self.value [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 636.669516] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 636.669516] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.669516] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 636.670284] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.670284] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 636.670284] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. 
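The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): when Neutron reports a port whose binding fell back to the failed VIF type, Nova raises PortBindingFailed and the build is re-scheduled. A minimal, self-contained sketch of that guard, assuming the usual Neutron port dict shape (the exception class is stubbed here for illustration):

# Sketch of the guard the traceback above ends in; not the Nova source,
# just the shape of the check it performs on the Neutron port dict.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding via the binding:vif_type attribute.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

ensure_no_port_binding_failure(
    {'id': 'ca493fc8-9cca-4390-af7b-6371a56f9bc5',
     'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed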
[ 636.670284] env[61648]: ERROR nova.compute.manager [ 636.670284] env[61648]: Traceback (most recent call last): [ 636.670284] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 636.670284] env[61648]: listener.cb(fileno) [ 636.670284] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.670284] env[61648]: result = function(*args, **kwargs) [ 636.670284] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 636.670284] env[61648]: return func(*args, **kwargs) [ 636.670284] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.670284] env[61648]: raise e [ 636.670284] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.670284] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 636.670284] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 636.670284] env[61648]: created_port_ids = self._update_ports_for_instance( [ 636.670284] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 636.670284] env[61648]: with excutils.save_and_reraise_exception(): [ 636.670284] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.670284] env[61648]: self.force_reraise() [ 636.670284] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.670284] env[61648]: raise self.value [ 636.670284] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 636.670284] env[61648]: updated_port = self._update_port( [ 636.670284] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.670284] env[61648]: _ensure_no_port_binding_failure(port) [ 636.670284] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.670284] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 636.671224] env[61648]: nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. [ 636.671224] env[61648]: Removing descriptor: 19 [ 636.671224] env[61648]: ERROR nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. 
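The same PortBindingFailed then surfaces a second time because network allocation runs in an eventlet greenthread (_allocate_network_async) and the stored exception is re-raised when the VMware spawn path iterates network_info (the _sync_wrapper/wait frames in the "Instance failed to spawn" traceback that follows). A tiny standalone illustration of that eventlet behaviour, with a placeholder allocate() standing in for the real allocation call:

# Standalone illustration of why the exception appears in two tracebacks:
# an error raised inside an eventlet greenthread is stored and re-raised to
# whichever caller later waits on it. allocate() here is a placeholder.
import eventlet

def allocate():
    raise RuntimeError('Binding failed for port <id>')

gt = eventlet.spawn(allocate)   # background allocation, as in Nova's
try:                            # _allocate_network_async
    gt.wait()                   # re-raises the greenthread's exception
except RuntimeError as exc:
    print('surfaced in caller:', exc)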
[ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Traceback (most recent call last): [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] yield resources [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.driver.spawn(context, instance, image_meta, [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 636.671224] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] vm_ref = self.build_virtual_machine(instance, [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] vif_infos = vmwarevif.get_vif_info(self._session, [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] for vif in network_info: [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self._sync_wrapper(fn, *args, **kwargs) [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.wait() [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self[:] = self._gt.wait() [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self._exit_event.wait() [ 636.671527] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.671834] env[61648]: ERROR 
nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] result = hub.switch() [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self.greenlet.switch() [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] result = function(*args, **kwargs) [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return func(*args, **kwargs) [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise e [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] nwinfo = self.network_api.allocate_for_instance( [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 636.671834] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] created_port_ids = self._update_ports_for_instance( [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] with excutils.save_and_reraise_exception(): [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.force_reraise() [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise self.value [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] updated_port = self._update_port( [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.672150] 
env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] _ensure_no_port_binding_failure(port) [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.672150] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise exception.PortBindingFailed(port_id=port['id']) [ 636.672430] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. [ 636.672430] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] [ 636.672430] env[61648]: INFO nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Terminating instance [ 636.672896] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquiring lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.784656] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-d1713c19-45cc-4d33-8b23-a9516bbaa25f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.784974] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 636.785174] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 636.785347] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.818373] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.960363] env[61648]: DEBUG nova.network.neutron [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.079456] env[61648]: DEBUG nova.network.neutron [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.130153] env[61648]: DEBUG nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.321699] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.583957] env[61648]: DEBUG oslo_concurrency.lockutils [req-238afbc2-ed5b-4d5a-8950-c7010b8af126 req-b072a1d4-596c-47f6-8768-34801264451e service nova] Releasing lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.585479] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquired lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.585479] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.637108] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.069s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.637108] env[61648]: ERROR nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 
tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Traceback (most recent call last): [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.driver.spawn(context, instance, image_meta, [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.637108] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] vm_ref = self.build_virtual_machine(instance, [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] for vif in network_info: [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self._sync_wrapper(fn, *args, **kwargs) [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.wait() [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self[:] = self._gt.wait() [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self._exit_event.wait() [ 637.637365] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 
637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] result = hub.switch() [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return self.greenlet.switch() [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] result = function(*args, **kwargs) [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] return func(*args, **kwargs) [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise e [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] nwinfo = self.network_api.allocate_for_instance( [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 637.637661] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] created_port_ids = self._update_ports_for_instance( [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] with excutils.save_and_reraise_exception(): [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] self.force_reraise() [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise self.value [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] updated_port = self._update_port( [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] _ensure_no_port_binding_failure(port) [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.637987] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] raise exception.PortBindingFailed(port_id=port['id']) [ 637.638298] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] nova.exception.PortBindingFailed: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. [ 637.638298] env[61648]: ERROR nova.compute.manager [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] [ 637.638298] env[61648]: DEBUG nova.compute.utils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.638298] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.633s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.641471] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Build of instance 87971b67-572c-4d5f-99b7-dab08aea10c1 was re-scheduled: Binding failed for port 2e4836a2-279b-447b-bb49-e1d15644d2bc, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 637.641886] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 637.642143] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquiring lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.642268] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Acquired lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.642419] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.819930] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.820773] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.826022] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: d1713c19-45cc-4d33-8b23-a9516bbaa25f] Took 1.04 seconds to deallocate network for instance. [ 638.104066] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.186760] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.220133] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.284731] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.528179] env[61648]: DEBUG nova.compute.manager [req-7f9329cc-1d1a-432d-a9ab-71b12b8f2d37 req-d0782244-2464-4ed8-b62b-dceb1ab34b92 service nova] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Received event network-vif-deleted-ca493fc8-9cca-4390-af7b-6371a56f9bc5 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 638.680124] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279c38ab-0aaa-41be-8f07-ee1c45946d57 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.688054] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7a1f37-f2ff-48dd-a3ba-8a0d5a9de57e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.723185] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Releasing lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.723672] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 638.723871] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.724358] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82463217-f277-40de-83fe-7b6e80140f80 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.726871] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6940b1-da53-45bf-ae1f-6be6f84f1cdc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.735327] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a79264-f740-402a-ac18-e22ef5fab6d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.741908] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bca035c-fa3e-49a0-9239-585b4e2e19ac {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.762286] env[61648]: DEBUG nova.compute.provider_tree [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.767224] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c35f0f15-6c13-4fbe-9ac2-ab6262590c38 could not be found. [ 638.767438] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 638.767614] env[61648]: INFO nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Took 0.04 seconds to destroy the instance on the hypervisor. [ 638.767846] env[61648]: DEBUG oslo.service.loopingcall [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.768289] env[61648]: DEBUG nova.compute.manager [-] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.768380] env[61648]: DEBUG nova.network.neutron [-] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 638.783712] env[61648]: DEBUG nova.network.neutron [-] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.789228] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Releasing lock "refresh_cache-87971b67-572c-4d5f-99b7-dab08aea10c1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.789228] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.789540] env[61648]: DEBUG nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.789540] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 638.804790] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.857012] env[61648]: INFO nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Deleted allocations for instance d1713c19-45cc-4d33-8b23-a9516bbaa25f [ 639.270173] env[61648]: DEBUG nova.scheduler.client.report [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 639.288681] env[61648]: DEBUG nova.network.neutron [-] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.307375] env[61648]: DEBUG nova.network.neutron [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.365280] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "d1713c19-45cc-4d33-8b23-a9516bbaa25f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.313s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.781695] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.146s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.782770] env[61648]: ERROR nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. 
[ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Traceback (most recent call last): [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.driver.spawn(context, instance, image_meta, [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] vm_ref = self.build_virtual_machine(instance, [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] vif_infos = vmwarevif.get_vif_info(self._session, [ 639.782770] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] for vif in network_info: [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return self._sync_wrapper(fn, *args, **kwargs) [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.wait() [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self[:] = self._gt.wait() [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return self._exit_event.wait() [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] current.throw(*self._exc) [ 639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
639.783127] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] result = function(*args, **kwargs) [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] return func(*args, **kwargs) [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise e [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] nwinfo = self.network_api.allocate_for_instance( [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] created_port_ids = self._update_ports_for_instance( [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] with excutils.save_and_reraise_exception(): [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] self.force_reraise() [ 639.783426] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise self.value [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] updated_port = self._update_port( [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] _ensure_no_port_binding_failure(port) [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] raise exception.PortBindingFailed(port_id=port['id']) [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] nova.exception.PortBindingFailed: Binding failed for 
port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. [ 639.783732] env[61648]: ERROR nova.compute.manager [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] [ 639.783732] env[61648]: DEBUG nova.compute.utils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 639.784952] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.300s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.787878] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Build of instance 3cd90969-d884-49fc-a2c3-8501e2c51ff6 was re-scheduled: Binding failed for port e219b76e-a60a-4f74-977c-d18997d31538, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 639.788381] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 639.788576] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.788725] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.788880] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.791051] env[61648]: INFO nova.compute.manager [-] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Took 1.02 seconds to deallocate network for instance. 
[ 639.792773] env[61648]: DEBUG nova.compute.claims [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 639.792955] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.811824] env[61648]: INFO nova.compute.manager [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] [instance: 87971b67-572c-4d5f-99b7-dab08aea10c1] Took 1.02 seconds to deallocate network for instance. [ 639.868182] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 640.323042] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.394893] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.452431] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.753352] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3341cce9-23de-4c07-97e1-7010c72018eb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.761767] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdafad6-68c5-4625-bc33-7b003d3f0eae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.797160] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01498b6-1678-4d7e-b5a5-178258ec97a7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.806066] env[61648]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f833d06c-dd7e-47d1-9246-7b2b84efab8d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.820569] env[61648]: DEBUG nova.compute.provider_tree [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.844643] env[61648]: INFO nova.scheduler.client.report [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Deleted allocations for instance 87971b67-572c-4d5f-99b7-dab08aea10c1 [ 640.956628] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-3cd90969-d884-49fc-a2c3-8501e2c51ff6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.957021] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 640.957293] env[61648]: DEBUG nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 640.958291] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.977268] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.326515] env[61648]: DEBUG nova.scheduler.client.report [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.363124] env[61648]: DEBUG oslo_concurrency.lockutils [None req-7ebb1764-7163-4922-a255-cd14aef582c6 tempest-MultipleCreateTestJSON-921078991 tempest-MultipleCreateTestJSON-921078991-project-member] Lock "87971b67-572c-4d5f-99b7-dab08aea10c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.270s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.480758] env[61648]: DEBUG nova.network.neutron [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.832823] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.048s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.833799] env[61648]: ERROR nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. 
[ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Traceback (most recent call last): [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.driver.spawn(context, instance, image_meta, [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] vm_ref = self.build_virtual_machine(instance, [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.833799] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] for vif in network_info: [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return self._sync_wrapper(fn, *args, **kwargs) [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.wait() [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self[:] = self._gt.wait() [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return self._exit_event.wait() [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] current.throw(*self._exc) [ 641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
641.834367] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] result = function(*args, **kwargs) [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] return func(*args, **kwargs) [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise e [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] nwinfo = self.network_api.allocate_for_instance( [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] created_port_ids = self._update_ports_for_instance( [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] with excutils.save_and_reraise_exception(): [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] self.force_reraise() [ 641.834826] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise self.value [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] updated_port = self._update_port( [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] _ensure_no_port_binding_failure(port) [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] raise exception.PortBindingFailed(port_id=port['id']) [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] nova.exception.PortBindingFailed: Binding failed for 
port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. [ 641.835923] env[61648]: ERROR nova.compute.manager [instance: 49284c45-b77b-4992-8437-d9d31ba12539] [ 641.835923] env[61648]: DEBUG nova.compute.utils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 641.836403] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.746s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.837821] env[61648]: INFO nova.compute.claims [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.841520] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Build of instance 49284c45-b77b-4992-8437-d9d31ba12539 was re-scheduled: Binding failed for port a13ef10b-4efa-4047-9b34-ae3bc97e857b, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 641.841520] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 641.841875] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquiring lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.842049] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Acquired lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.842213] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.865203] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 641.987738] env[61648]: INFO nova.compute.manager [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 3cd90969-d884-49fc-a2c3-8501e2c51ff6] Took 1.03 seconds to deallocate network for instance. [ 642.371898] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.388195] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.478200] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.564780] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquiring lock "ffb6b3e0-5602-4c28-958d-22265337e236" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.565319] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "ffb6b3e0-5602-4c28-958d-22265337e236" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.984821] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Releasing lock "refresh_cache-49284c45-b77b-4992-8437-d9d31ba12539" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.985282] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 642.985387] env[61648]: DEBUG nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 642.985620] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.020916] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.039097] env[61648]: INFO nova.scheduler.client.report [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Deleted allocations for instance 3cd90969-d884-49fc-a2c3-8501e2c51ff6 [ 643.372692] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4937e1-a65c-41ea-91e1-f4e14e23dd12 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.381892] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf655b0a-6b0e-411b-9088-71b365798ceb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.416059] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea40e864-6209-40a9-b98d-aa0b2674bde7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.423541] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f2f08b-4c21-479a-8bb0-aed8d8b9c8e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.440222] env[61648]: DEBUG nova.compute.provider_tree [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.527088] env[61648]: DEBUG nova.network.neutron [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.551265] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3981eaab-eb70-4af9-8d16-5b1a020a36a5 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock 
"3cd90969-d884-49fc-a2c3-8501e2c51ff6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.836s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.945483] env[61648]: DEBUG nova.scheduler.client.report [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.031836] env[61648]: INFO nova.compute.manager [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] [instance: 49284c45-b77b-4992-8437-d9d31ba12539] Took 1.05 seconds to deallocate network for instance. [ 644.053590] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 644.450804] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.451344] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.456522] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.805s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.588153] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.698583] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "6d9ab9ac-d892-47e7-9b86-a2dce40a4568" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.698808] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "6d9ab9ac-d892-47e7-9b86-a2dce40a4568" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.961160] env[61648]: DEBUG nova.compute.utils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 644.965545] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 644.965840] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.017936] env[61648]: DEBUG nova.policy [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b1268755c345b490f3509712d3f080', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '195458d7c0d44614a0a7c4b7d8c1367f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.064644] env[61648]: INFO nova.scheduler.client.report [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Deleted allocations for instance 49284c45-b77b-4992-8437-d9d31ba12539 [ 645.372446] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Successfully created port: 57f36687-144b-4979-9033-2ba72b84fdc2 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.465529] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 645.492552] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance baf240b6-0a42-485f-9176-78dda5de3c7d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.492696] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.492830] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance c35f0f15-6c13-4fbe-9ac2-ab6262590c38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.493147] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.574462] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e9544dd8-70e2-421c-97d2-ad06824473a3 tempest-InstanceActionsTestJSON-317559836 tempest-InstanceActionsTestJSON-317559836-project-member] Lock "49284c45-b77b-4992-8437-d9d31ba12539" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.056s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.997031] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 8c8a339c-e52a-4257-9191-4e03ecf87b22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 646.077574] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 646.480499] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 646.501128] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 646.519338] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.519606] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.519741] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.519917] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.521631] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.521850] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.522086] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.525227] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.525449] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.525701] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.526009] env[61648]: DEBUG nova.virt.hardware [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.528569] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e31a65-c167-4cfa-80be-af0d70d85652 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.537833] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4667c431-cdb1-4b95-a734-25b4631e45b4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.601418] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.008461] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 33e270fd-0393-4425-8312-1e9fc91f3d1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 647.168259] env[61648]: DEBUG nova.compute.manager [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Received event network-changed-57f36687-144b-4979-9033-2ba72b84fdc2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.168259] env[61648]: DEBUG nova.compute.manager [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Refreshing instance network info cache due to event network-changed-57f36687-144b-4979-9033-2ba72b84fdc2. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 647.168259] env[61648]: DEBUG oslo_concurrency.lockutils [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] Acquiring lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.168259] env[61648]: DEBUG oslo_concurrency.lockutils [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] Acquired lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.168259] env[61648]: DEBUG nova.network.neutron [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Refreshing network info cache for port 57f36687-144b-4979-9033-2ba72b84fdc2 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.501659] env[61648]: ERROR nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. [ 647.501659] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.501659] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.501659] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.501659] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.501659] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.501659] env[61648]: ERROR nova.compute.manager raise self.value [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.501659] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.501659] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.501659] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.502169] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.502169] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.502169] env[61648]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. [ 647.502169] env[61648]: ERROR nova.compute.manager [ 647.502169] env[61648]: Traceback (most recent call last): [ 647.502169] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.502169] env[61648]: listener.cb(fileno) [ 647.502169] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.502169] env[61648]: result = function(*args, **kwargs) [ 647.502169] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.502169] env[61648]: return func(*args, **kwargs) [ 647.502169] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.502169] env[61648]: raise e [ 647.502169] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.502169] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 647.502169] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.502169] env[61648]: created_port_ids = self._update_ports_for_instance( [ 647.502169] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.502169] env[61648]: with excutils.save_and_reraise_exception(): [ 647.502169] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.502169] env[61648]: self.force_reraise() [ 647.502169] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.502169] env[61648]: raise self.value [ 647.502169] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.502169] env[61648]: updated_port = self._update_port( [ 647.502169] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.502169] env[61648]: _ensure_no_port_binding_failure(port) [ 647.502169] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.502169] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.503122] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. [ 647.503122] env[61648]: Removing descriptor: 16 [ 647.503122] env[61648]: ERROR nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. 
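Both build failures in this section bottom out in the same check, _ensure_no_port_binding_failure at nova/network/neutron.py:294. A minimal sketch of that path follows; the function and exception names are taken from the tracebacks, while the test against a 'binding_failed' vif_type is an assumption about what the helper inspects, not a copy of the Nova source.

```python
# Sketch only: names follow the traceback above, the vif_type check is assumed.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind by setting binding:vif_type to
    # 'binding_failed'; Nova raises, which aborts the spawn and triggers the
    # re-schedule / terminate sequence recorded in this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    _ensure_no_port_binding_failure(
        {'id': '57f36687-144b-4979-9033-2ba72b84fdc2',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR entries above
```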
[ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Traceback (most recent call last): [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] yield resources [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.driver.spawn(context, instance, image_meta, [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.503122] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] vm_ref = self.build_virtual_machine(instance, [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] for vif in network_info: [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self._sync_wrapper(fn, *args, **kwargs) [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.wait() [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self[:] = self._gt.wait() [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self._exit_event.wait() [ 647.503518] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.503918] env[61648]: ERROR 
nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] result = hub.switch() [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self.greenlet.switch() [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] result = function(*args, **kwargs) [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return func(*args, **kwargs) [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise e [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] nwinfo = self.network_api.allocate_for_instance( [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.503918] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] created_port_ids = self._update_ports_for_instance( [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] with excutils.save_and_reraise_exception(): [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.force_reraise() [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise self.value [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] updated_port = self._update_port( [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.504362] 
env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] _ensure_no_port_binding_failure(port) [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.504362] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise exception.PortBindingFailed(port_id=port['id']) [ 647.504675] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. [ 647.504675] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] [ 647.504675] env[61648]: INFO nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Terminating instance [ 647.509426] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.511818] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance be7776f1-0083-4c40-a7e6-477c0c65f7bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 647.603926] env[61648]: DEBUG nova.network.neutron [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.751573] env[61648]: DEBUG nova.network.neutron [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.016036] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 9008460a-6b35-468d-803c-d10c139494f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.258237] env[61648]: DEBUG oslo_concurrency.lockutils [req-d1e605a3-d957-472b-9662-927823babe65 req-0d3aa934-4d0a-43b2-96db-e4dda87f7ef1 service nova] Releasing lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.258237] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquired lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.258237] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.524345] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 55630bdb-fe38-49dc-baa2-2ac5de20e569 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.559824] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquiring lock "26534fec-8cf8-4cdd-a91f-e63afabc0d57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.559954] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "26534fec-8cf8-4cdd-a91f-e63afabc0d57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.784624] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.869737] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.030291] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 458302d3-123c-47e8-bee8-6fe1462d5f4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.126809] env[61648]: DEBUG nova.compute.manager [req-f5489846-c4c2-47ff-9e6b-c0c6b974440c req-0f0bad5c-b699-48aa-b861-4c914448158e service nova] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Received event network-vif-deleted-57f36687-144b-4979-9033-2ba72b84fdc2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.372182] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Releasing lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.372862] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 649.373105] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.373421] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2faf56c4-9dc2-4ea5-a26b-045d0f10d2db {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.382692] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b177ae-c18e-4aeb-b0cb-4d34472e1a8a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.404662] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c could not be found. 
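The WARNING just above ("Instance does not exist on backend") is the expected outcome here: the spawn failed before any VM was created in vCenter, so the destroy path treats InstanceNotFound as already-gone and carries on with the network deallocation and claim abort that follow. A hedged sketch of that pattern; the helper names are illustrative, not the actual nova.virt.vmwareapi.vmops code.

```python
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("vmops-sketch")

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(instance_uuid, find_vm_ref, unregister_vm):
    # find_vm_ref / unregister_vm are hypothetical callables standing in for the
    # vCenter lookups (SearchIndex.FindAllByUuid etc.) seen in the log above.
    try:
        unregister_vm(find_vm_ref(instance_uuid))
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")

def _lookup_missing(uuid):
    raise InstanceNotFound(uuid)

destroy("376f4b46-b5e1-428d-ae7a-c4d9596c0a4c", _lookup_missing, lambda ref: None)
```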
[ 649.405074] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 649.405266] env[61648]: INFO nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 649.405559] env[61648]: DEBUG oslo.service.loopingcall [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.405825] env[61648]: DEBUG nova.compute.manager [-] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 649.405960] env[61648]: DEBUG nova.network.neutron [-] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.426588] env[61648]: DEBUG nova.network.neutron [-] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.534484] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 46672a70-7d6b-4a86-833b-a7583c71e595 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.929563] env[61648]: DEBUG nova.network.neutron [-] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.036748] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance a9d3592b-56f7-4823-bf0c-8b92ac4587bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.438255] env[61648]: INFO nova.compute.manager [-] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Took 1.03 seconds to deallocate network for instance. 
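Not part of the log: with several tempest builds interleaved, it helps to pull out a single request before reading a sequence like the terminate/deallocate path above. A small filter, assuming the dump is fed on stdin; the request id shown is the ServerRescueNegativeTestJSON build whose cleanup is logged here.

```python
import re
import sys

REQ_ID = "req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7"  # taken from the entries above

def entries(text):
    # Every entry starts with "[ <seconds>] env[<pid>]:"; entries wrap across
    # physical lines, so split on that marker instead of on newlines.
    return re.split(r"(?=\[ +\d+\.\d+\] env\[\d+\]:)", text)

if __name__ == "__main__":
    for entry in entries(sys.stdin.read()):
        if REQ_ID in entry:
            print(" ".join(entry.split()))
```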
[ 650.441214] env[61648]: DEBUG nova.compute.claims [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 650.441214] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.542157] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b07648a0-23a5-4dee-9582-ce393292b768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.045389] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e9d5a8b8-afc2-40dc-b480-0b946e085e18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.551935] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance a1a8b990-f4b7-4049-9345-562d1b5c180e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.059643] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance a978aa73-3f2a-4a87-bda3-bcde3028a646 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.565853] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 0a321a24-0f87-47e7-8364-5da5f6a65131 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.059551] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "d4b580e9-aae2-4c14-abd8-c6a08f0a576c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.060618] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "d4b580e9-aae2-4c14-abd8-c6a08f0a576c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.068351] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.577132] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 959931f5-eebc-4544-af88-ea231301b4a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.079333] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 831da774-5e37-4d49-a1fd-3eb421c7fcb7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.581777] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 14854fd0-680a-48a2-b1d6-50e75624aef2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.086243] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b9130bac-f92b-4208-b84c-852f4a269153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.588981] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d76d8aed-9126-4d21-9df9-6317c3b19f65 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.092314] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance bb9f6dc4-fd06-4cb5-984f-c938ed901772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.598370] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.101482] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance ffb6b3e0-5602-4c28-958d-22265337e236 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.604459] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 6d9ab9ac-d892-47e7-9b86-a2dce40a4568 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.604790] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 657.604855] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 658.025957] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5fb540-7670-43cd-8156-2f49e7292e50 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.036360] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47df66ed-ea0e-473d-8f9c-7c13f15f268d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.067604] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e01c6b-05e9-4d8f-900a-4d23bed16222 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.075010] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15de0098-8336-4fad-ba0b-b4e67335762a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.090519] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.595091] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.102020] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 659.102020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 14.646s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.102020] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.529s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.027391] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dc6d0a-1daf-4ecc-b480-be15f0634102 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.037993] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6da434-f7fb-481a-b44e-75910ff95e58 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.072046] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21170336-c525-4cde-b0da-1277a2b1ab0c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.081451] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28593c3e-aacc-42b9-a3b3-05cfe4eccdde {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.095144] env[61648]: DEBUG nova.compute.provider_tree [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.601023] env[61648]: DEBUG nova.scheduler.client.report [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.106255] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.106255] env[61648]: ERROR nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. 
[ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Traceback (most recent call last): [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.driver.spawn(context, instance, image_meta, [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.106255] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] vm_ref = self.build_virtual_machine(instance, [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] for vif in network_info: [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return self._sync_wrapper(fn, *args, **kwargs) [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.wait() [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self[:] = self._gt.wait() [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return self._exit_event.wait() [ 661.106722] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] current.throw(*self._exc) [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] result = function(*args, **kwargs) [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] return func(*args, **kwargs) [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise e [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] nwinfo = self.network_api.allocate_for_instance( [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] created_port_ids = self._update_ports_for_instance( [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 661.107098] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] with excutils.save_and_reraise_exception(): [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] self.force_reraise() [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise self.value [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] updated_port = self._update_port( [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] _ensure_no_port_binding_failure(port) [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] raise exception.PortBindingFailed(port_id=port['id']) [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] nova.exception.PortBindingFailed: Binding failed for 
port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. [ 661.107413] env[61648]: ERROR nova.compute.manager [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] [ 661.107709] env[61648]: DEBUG nova.compute.utils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.112737] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 33.263s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.112737] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Build of instance baf240b6-0a42-485f-9176-78dda5de3c7d was re-scheduled: Binding failed for port c58fb08c-955e-4c6c-a8d7-adb284aff803, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 661.112737] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 661.112737] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquiring lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.114345] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Acquired lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.114345] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.635014] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.707424] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.930509] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cc2d3a-dfcc-4beb-9b16-2a31253d755c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.938540] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0221a810-4f08-4e61-969e-3586ac01f060 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.969304] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c5e472-1e5e-4be7-816b-5b4a98ed3761 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.976796] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4ed3f2-1b94-48d8-9d2b-713e754da63d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.990229] env[61648]: DEBUG nova.compute.provider_tree [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.212179] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Releasing lock "refresh_cache-baf240b6-0a42-485f-9176-78dda5de3c7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.212428] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 662.212608] env[61648]: DEBUG nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 662.212776] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.236951] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.493239] env[61648]: DEBUG nova.scheduler.client.report [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 662.739758] env[61648]: DEBUG nova.network.neutron [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.998524] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.890s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.999156] env[61648]: ERROR nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. 
[ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Traceback (most recent call last): [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.driver.spawn(context, instance, image_meta, [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] vm_ref = self.build_virtual_machine(instance, [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.999156] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] for vif in network_info: [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return self._sync_wrapper(fn, *args, **kwargs) [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.wait() [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self[:] = self._gt.wait() [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return self._exit_event.wait() [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] current.throw(*self._exc) [ 662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
662.999782] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] result = function(*args, **kwargs) [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] return func(*args, **kwargs) [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise e [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] nwinfo = self.network_api.allocate_for_instance( [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] created_port_ids = self._update_ports_for_instance( [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] with excutils.save_and_reraise_exception(): [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] self.force_reraise() [ 663.000328] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise self.value [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] updated_port = self._update_port( [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] _ensure_no_port_binding_failure(port) [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] raise exception.PortBindingFailed(port_id=port['id']) [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] nova.exception.PortBindingFailed: Binding failed for 
port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. [ 663.001117] env[61648]: ERROR nova.compute.manager [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] [ 663.001117] env[61648]: DEBUG nova.compute.utils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 663.002085] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.634s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.002899] env[61648]: INFO nova.compute.claims [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.012605] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Build of instance 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8 was re-scheduled: Binding failed for port 19f522ca-cb0e-4101-be1f-6aa522ceb6f7, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 663.012605] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 663.012605] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquiring lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.012605] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Acquired lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.012819] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.248239] env[61648]: INFO nova.compute.manager [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] [instance: baf240b6-0a42-485f-9176-78dda5de3c7d] Took 1.03 seconds to deallocate network for instance. [ 663.538980] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.609056] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.111202] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Releasing lock "refresh_cache-453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.111516] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 664.111558] env[61648]: DEBUG nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 664.112271] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.126311] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.275419] env[61648]: INFO nova.scheduler.client.report [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Deleted allocations for instance baf240b6-0a42-485f-9176-78dda5de3c7d [ 664.364027] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda70f0-31fe-458c-89e1-fa310c2de4df {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.372508] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7abe66-b12e-4068-b846-488eb4228a48 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.406752] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d142fc5d-750f-4680-b5a3-9407b0b84846 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.414081] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc2cca0-8455-45aa-9784-4737d7634b7b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.427541] env[61648]: DEBUG nova.compute.provider_tree [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.628451] env[61648]: DEBUG nova.network.neutron [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.786717] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a803db38-f2df-4959-8fc5-30a0f10bfd66 tempest-ServersTestBootFromVolume-73967423 tempest-ServersTestBootFromVolume-73967423-project-member] Lock 
"baf240b6-0a42-485f-9176-78dda5de3c7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.578s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.932032] env[61648]: DEBUG nova.scheduler.client.report [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.132244] env[61648]: INFO nova.compute.manager [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] [instance: 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8] Took 1.02 seconds to deallocate network for instance. [ 665.288832] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 665.438662] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.439498] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 665.443705] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.735s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.448285] env[61648]: INFO nova.compute.claims [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.831133] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.958417] env[61648]: DEBUG nova.compute.utils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 665.959735] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 665.959900] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 666.031735] env[61648]: DEBUG nova.policy [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 666.173717] env[61648]: INFO nova.scheduler.client.report [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Deleted allocations for instance 453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8 [ 666.463077] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 666.537272] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Successfully created port: b73a1922-a46f-4870-be29-d33b5b919064 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.691906] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3f3d705f-0e61-4a77-b7e2-c0f7c7934b89 tempest-AttachInterfacesV270Test-1094926256 tempest-AttachInterfacesV270Test-1094926256-project-member] Lock "453d71d6-e8ab-4fdb-b5e7-f84bb2020bf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.728s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.900506] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0f2c8c-668c-4dba-aa3a-ad64936102b9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.908409] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfedddf5-dda4-4f12-8026-4b295d328f46 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.942352] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e9be6b-89bd-4c59-8948-2d2428b0e59f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.951683] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491bdc53-350d-4401-961d-9bc70ade73b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.967477] env[61648]: DEBUG nova.compute.provider_tree [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.197582] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 667.473729] env[61648]: DEBUG nova.scheduler.client.report [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.484076] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 667.512579] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 667.513061] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 667.513382] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.514055] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 667.514055] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.514055] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 
tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 667.514265] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 667.514512] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 667.515316] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 667.515316] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 667.515316] env[61648]: DEBUG nova.virt.hardware [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 667.516421] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63a9dda-b433-4607-84cb-613261113c63 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.524851] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52879cc5-d09c-4db8-b5e3-d8ad98303aa0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.717580] env[61648]: DEBUG nova.compute.manager [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Received event network-changed-b73a1922-a46f-4870-be29-d33b5b919064 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 667.717770] env[61648]: DEBUG nova.compute.manager [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Refreshing instance network info cache due to event network-changed-b73a1922-a46f-4870-be29-d33b5b919064. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 667.717975] env[61648]: DEBUG oslo_concurrency.lockutils [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] Acquiring lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.718122] env[61648]: DEBUG oslo_concurrency.lockutils [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] Acquired lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.718276] env[61648]: DEBUG nova.network.neutron [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Refreshing network info cache for port b73a1922-a46f-4870-be29-d33b5b919064 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.736559] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.946514] env[61648]: ERROR nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. 
[ 667.946514] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.946514] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 667.946514] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 667.946514] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.946514] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.946514] env[61648]: ERROR nova.compute.manager raise self.value [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 667.946514] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 667.946514] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.946514] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 667.946993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.946993] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 667.946993] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. 
[ 667.946993] env[61648]: ERROR nova.compute.manager [ 667.946993] env[61648]: Traceback (most recent call last): [ 667.946993] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 667.946993] env[61648]: listener.cb(fileno) [ 667.946993] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 667.946993] env[61648]: result = function(*args, **kwargs) [ 667.946993] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 667.946993] env[61648]: return func(*args, **kwargs) [ 667.946993] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 667.946993] env[61648]: raise e [ 667.946993] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.946993] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 667.946993] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 667.946993] env[61648]: created_port_ids = self._update_ports_for_instance( [ 667.946993] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 667.946993] env[61648]: with excutils.save_and_reraise_exception(): [ 667.946993] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.946993] env[61648]: self.force_reraise() [ 667.946993] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.946993] env[61648]: raise self.value [ 667.946993] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 667.946993] env[61648]: updated_port = self._update_port( [ 667.946993] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.946993] env[61648]: _ensure_no_port_binding_failure(port) [ 667.946993] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.946993] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 667.947731] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. [ 667.947731] env[61648]: Removing descriptor: 16 [ 667.947731] env[61648]: ERROR nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. 
[ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Traceback (most recent call last): [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] yield resources [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.driver.spawn(context, instance, image_meta, [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 667.947731] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] vm_ref = self.build_virtual_machine(instance, [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] vif_infos = vmwarevif.get_vif_info(self._session, [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] for vif in network_info: [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self._sync_wrapper(fn, *args, **kwargs) [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.wait() [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self[:] = self._gt.wait() [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self._exit_event.wait() [ 667.948047] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 667.948382] env[61648]: ERROR 
nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] result = hub.switch() [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self.greenlet.switch() [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] result = function(*args, **kwargs) [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return func(*args, **kwargs) [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise e [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] nwinfo = self.network_api.allocate_for_instance( [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 667.948382] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] created_port_ids = self._update_ports_for_instance( [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] with excutils.save_and_reraise_exception(): [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.force_reraise() [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise self.value [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] updated_port = self._update_port( [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.948711] 
env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] _ensure_no_port_binding_failure(port) [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.948711] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise exception.PortBindingFailed(port_id=port['id']) [ 667.949013] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. [ 667.949013] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] [ 667.949013] env[61648]: INFO nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Terminating instance [ 667.950294] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.979778] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.979835] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 667.985899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.192s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.245191] env[61648]: DEBUG nova.network.neutron [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.359055] env[61648]: DEBUG nova.network.neutron [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.498424] env[61648]: DEBUG nova.compute.utils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 668.503564] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 668.503910] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.567761] env[61648]: DEBUG nova.policy [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b1268755c345b490f3509712d3f080', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '195458d7c0d44614a0a7c4b7d8c1367f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 668.862101] env[61648]: DEBUG oslo_concurrency.lockutils [req-99c9f474-d2e2-4c94-be62-c4d4c1d0c9e9 req-a60b4213-138d-4cc1-b9bc-389f61228c1b service nova] Releasing lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.862485] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.862686] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.957811] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02abc457-b10a-419a-bbd3-b7a63d74b646 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.965914] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102b696e-ef2f-44b1-83fd-41d1d04eb5b5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.998758] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0359fd-dc88-459a-a0bf-035ad375226c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.008170] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 669.013686] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc1293f-5d79-4880-8bba-51af78aec5f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.029572] env[61648]: DEBUG nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 669.073378] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Successfully created port: b728f6c3-5089-47a3-9f3c-190de17af12b {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.392508] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.553197] env[61648]: ERROR nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [req-3bec6c27-a5ae-485a-ba98-57360ce07f75] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3bec6c27-a5ae-485a-ba98-57360ce07f75"}]}: nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. [ 669.575201] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 669.589718] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 669.589945] env[61648]: DEBUG nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 669.602977] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 669.628511] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 669.721353] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 
8c8a339c-e52a-4257-9191-4e03ecf87b22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.815203] env[61648]: DEBUG nova.compute.manager [req-a35491f6-90b3-4e9e-940d-b0d822f950e9 req-dde6576e-9949-4e5e-b40f-728c618c71ad service nova] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Received event network-vif-deleted-b73a1922-a46f-4870-be29-d33b5b919064 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 670.019241] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 670.040999] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8236859-353c-4273-b53c-372c6962d00d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.052719] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12d3e5-0fc3-44ba-8d2d-1c857f79cfc3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.087733] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 670.088061] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 670.088228] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.088412] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 670.088557] env[61648]: DEBUG 
nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.088702] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 670.088905] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 670.089074] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 670.089241] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 670.089406] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 670.089566] env[61648]: DEBUG nova.virt.hardware [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 670.090986] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab298e5b-bc08-4a8d-bf5f-cb213bba64b8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.094131] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e16891-7d1f-4e2c-9a24-f0303a5fa50a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.104515] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6250def4-3e5d-4daf-ad80-dd2703635b4c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.110263] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a33560-4f11-4c86-bb56-b40a111ee494 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.139726] env[61648]: DEBUG nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 670.225361] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.225972] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 670.226118] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.226352] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-402812b3-97b9-4495-a137-c3ab9d0bb486 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.237935] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2354a6a3-8a82-4c82-85ce-550532a63e58 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.259812] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c8a339c-e52a-4257-9191-4e03ecf87b22 could not be found. [ 670.260120] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.260318] env[61648]: INFO nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 670.261032] env[61648]: DEBUG oslo.service.loopingcall [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.261108] env[61648]: DEBUG nova.compute.manager [-] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 670.261200] env[61648]: DEBUG nova.network.neutron [-] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.293103] env[61648]: DEBUG nova.network.neutron [-] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.419024] env[61648]: ERROR nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 670.419024] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.419024] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.419024] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.419024] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.419024] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.419024] env[61648]: ERROR nova.compute.manager raise self.value [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.419024] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 670.419024] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.419024] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 670.419985] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.419985] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 670.419985] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 670.419985] env[61648]: ERROR nova.compute.manager [ 670.419985] env[61648]: Traceback (most recent call last): [ 670.419985] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 670.419985] env[61648]: listener.cb(fileno) [ 670.419985] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.419985] env[61648]: result = function(*args, **kwargs) [ 670.419985] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.419985] env[61648]: return func(*args, **kwargs) [ 670.419985] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.419985] env[61648]: raise e [ 670.419985] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.419985] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 670.419985] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.419985] env[61648]: created_port_ids = self._update_ports_for_instance( [ 670.419985] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.419985] env[61648]: with excutils.save_and_reraise_exception(): [ 670.419985] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.419985] env[61648]: self.force_reraise() [ 670.419985] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.419985] env[61648]: raise self.value [ 670.419985] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.419985] env[61648]: updated_port = self._update_port( [ 670.419985] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.419985] env[61648]: _ensure_no_port_binding_failure(port) [ 670.419985] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.419985] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 670.421135] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 670.421135] env[61648]: Removing descriptor: 16 [ 670.421135] env[61648]: ERROR nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. 
[ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Traceback (most recent call last): [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] yield resources [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.driver.spawn(context, instance, image_meta, [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.421135] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] vm_ref = self.build_virtual_machine(instance, [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] for vif in network_info: [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self._sync_wrapper(fn, *args, **kwargs) [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.wait() [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self[:] = self._gt.wait() [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self._exit_event.wait() [ 670.421630] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.422174] env[61648]: ERROR 
nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] result = hub.switch() [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self.greenlet.switch() [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] result = function(*args, **kwargs) [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return func(*args, **kwargs) [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise e [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] nwinfo = self.network_api.allocate_for_instance( [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.422174] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] created_port_ids = self._update_ports_for_instance( [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] with excutils.save_and_reraise_exception(): [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.force_reraise() [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise self.value [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] updated_port = self._update_port( [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.422688] 
env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] _ensure_no_port_binding_failure(port) [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 670.422688] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise exception.PortBindingFailed(port_id=port['id']) [ 670.424468] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 670.424468] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] [ 670.424468] env[61648]: INFO nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Terminating instance [ 670.424468] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.424468] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquired lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.427302] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.679191] env[61648]: DEBUG nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 670.682213] env[61648]: DEBUG nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 68 to 69 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 670.682213] env[61648]: DEBUG 
nova.compute.provider_tree [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 670.795655] env[61648]: DEBUG nova.network.neutron [-] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.948266] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.103745] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.186854] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.199s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.186854] env[61648]: ERROR nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. 
[ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Traceback (most recent call last): [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.driver.spawn(context, instance, image_meta, [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.186854] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] vm_ref = self.build_virtual_machine(instance, [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] for vif in network_info: [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self._sync_wrapper(fn, *args, **kwargs) [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.wait() [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self[:] = self._gt.wait() [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self._exit_event.wait() [ 671.187257] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] result = hub.switch() [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return self.greenlet.switch() [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] result = function(*args, **kwargs) [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] return func(*args, **kwargs) [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise e [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] nwinfo = self.network_api.allocate_for_instance( [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.187788] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] created_port_ids = self._update_ports_for_instance( [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] with excutils.save_and_reraise_exception(): [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] self.force_reraise() [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise self.value [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] updated_port = self._update_port( [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] _ensure_no_port_binding_failure(port) [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 671.188198] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] raise exception.PortBindingFailed(port_id=port['id']) [ 671.188811] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] nova.exception.PortBindingFailed: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. [ 671.188811] env[61648]: ERROR nova.compute.manager [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] [ 671.188811] env[61648]: DEBUG nova.compute.utils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 671.193024] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Build of instance c35f0f15-6c13-4fbe-9ac2-ab6262590c38 was re-scheduled: Binding failed for port ca493fc8-9cca-4390-af7b-6371a56f9bc5, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 671.193024] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 671.193024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquiring lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.193024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Acquired lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.193398] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.193398] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.798s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.195489] 
env[61648]: INFO nova.compute.claims [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.298881] env[61648]: INFO nova.compute.manager [-] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Took 1.04 seconds to deallocate network for instance. [ 671.305233] env[61648]: DEBUG nova.compute.claims [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 671.305660] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.609143] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Releasing lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.609143] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 671.609143] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 671.609143] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6279a213-2e2f-4c49-8155-8c9a0cc0a07a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.621240] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3137ab46-8600-4afd-b52d-62ac988a86a6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.654887] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22 could not be found. 
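Note on the PortBindingFailed tracebacks above: every one of them bottoms out in nova/network/neutron.py, _ensure_no_port_binding_failure. A minimal stand-alone sketch of that check, assuming the upstream behaviour implied by the frames (Neutron marks a port whose binding could not be completed with binding:vif_type = 'binding_failed', and Nova converts that into the PortBindingFailed seen here); the class and function below are stand-ins, not this deployment's source:

    # Stand-in mirroring nova's _ensure_no_port_binding_failure (sketch only).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron marker value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # If Neutron reported a failed binding on the port, raise so the
        # build is aborted, the claim is dropped and the instance is
        # rescheduled, as seen in the records above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    port = {'id': 'b728f6c3-5089-47a3-9f3c-190de17af12b',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # matches the "Binding failed for port ..." message logged above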
[ 671.654887] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.654887] env[61648]: INFO nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Took 0.05 seconds to destroy the instance on the hypervisor. [ 671.655166] env[61648]: DEBUG oslo.service.loopingcall [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.655434] env[61648]: DEBUG nova.compute.manager [-] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.655527] env[61648]: DEBUG nova.network.neutron [-] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.683344] env[61648]: DEBUG nova.network.neutron [-] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.729589] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.894563] env[61648]: DEBUG nova.compute.manager [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Received event network-changed-b728f6c3-5089-47a3-9f3c-190de17af12b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 671.894945] env[61648]: DEBUG nova.compute.manager [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Refreshing instance network info cache due to event network-changed-b728f6c3-5089-47a3-9f3c-190de17af12b. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 671.895359] env[61648]: DEBUG oslo_concurrency.lockutils [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] Acquiring lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.896274] env[61648]: DEBUG oslo_concurrency.lockutils [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] Acquired lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.896430] env[61648]: DEBUG nova.network.neutron [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Refreshing network info cache for port b728f6c3-5089-47a3-9f3c-190de17af12b {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.986424] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.186247] env[61648]: DEBUG nova.network.neutron [-] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.427043] env[61648]: DEBUG nova.network.neutron [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.493913] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Releasing lock "refresh_cache-c35f0f15-6c13-4fbe-9ac2-ab6262590c38" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.494721] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 672.495922] env[61648]: DEBUG nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 672.495922] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.526032] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.685841] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081c9158-aad4-4bb5-ac2a-c51442b70277 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.690235] env[61648]: INFO nova.compute.manager [-] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Took 1.03 seconds to deallocate network for instance. [ 672.696257] env[61648]: DEBUG nova.compute.claims [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 672.696257] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.696257] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5d23f-9cd8-4577-a764-03ee8eb03a06 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.727199] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a578d048-cb9b-444f-8e57-80b4d357d9d5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.734885] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0bd00d-bf25-4659-8114-324baba212f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.749061] env[61648]: DEBUG nova.compute.provider_tree [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Inventory has not changed in ProviderTree for provider: 
1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.801117] env[61648]: DEBUG nova.network.neutron [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.030268] env[61648]: DEBUG nova.network.neutron [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.252919] env[61648]: DEBUG nova.scheduler.client.report [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 673.305809] env[61648]: DEBUG oslo_concurrency.lockutils [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] Releasing lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.305809] env[61648]: DEBUG nova.compute.manager [req-de50ee6b-f7a0-4b63-b379-8d70c6c831a3 req-b43f16ed-6cfe-4d2c-b61b-316a875dd820 service nova] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Received event network-vif-deleted-b728f6c3-5089-47a3-9f3c-190de17af12b {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 673.536016] env[61648]: INFO nova.compute.manager [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] [instance: c35f0f15-6c13-4fbe-9ac2-ab6262590c38] Took 1.04 seconds to deallocate network for instance. [ 673.766104] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.766926] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 673.771499] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.383s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.773506] env[61648]: INFO nova.compute.claims [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.189660] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquiring lock "423b5f66-624b-49fe-9f65-9bd3318917c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.189899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "423b5f66-624b-49fe-9f65-9bd3318917c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.281157] env[61648]: DEBUG nova.compute.utils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 674.288641] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 674.288641] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 674.335591] env[61648]: DEBUG nova.policy [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a80af8b85dda488695129c59bccc9226', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ef957a5c37b4fc2ac5a0cfa84c9111d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 674.571899] env[61648]: INFO nova.scheduler.client.report [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Deleted allocations for instance c35f0f15-6c13-4fbe-9ac2-ab6262590c38 [ 674.662470] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Successfully created port: 96a8ab00-8cbd-4641-a7de-ddafd50bfe92 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.684768] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquiring lock "3ca295b7-50e2-4b6b-8033-991328a43f3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.685036] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "3ca295b7-50e2-4b6b-8033-991328a43f3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.796060] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 675.089248] env[61648]: DEBUG oslo_concurrency.lockutils [None req-63dc2adf-8c6e-4d1e-95cf-0daaea522a6f tempest-AttachInterfacesUnderV243Test-1574430665 tempest-AttachInterfacesUnderV243Test-1574430665-project-member] Lock "c35f0f15-6c13-4fbe-9ac2-ab6262590c38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.610s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.258536] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bccb8a2-cc0e-42bd-b519-0cae4ed37311 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.266172] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528b7bd4-2787-48a0-8555-e753088a679e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.300135] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f75a89-2e14-4622-a8d5-45af49557b7c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.313198] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3edacd8-dbe8-439f-91b8-844096aa145d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.328695] env[61648]: DEBUG nova.compute.provider_tree [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.594599] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 675.645691] env[61648]: DEBUG nova.compute.manager [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Received event network-changed-96a8ab00-8cbd-4641-a7de-ddafd50bfe92 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 675.645877] env[61648]: DEBUG nova.compute.manager [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Refreshing instance network info cache due to event network-changed-96a8ab00-8cbd-4641-a7de-ddafd50bfe92. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 675.646113] env[61648]: DEBUG oslo_concurrency.lockutils [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] Acquiring lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.646242] env[61648]: DEBUG oslo_concurrency.lockutils [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] Acquired lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.646478] env[61648]: DEBUG nova.network.neutron [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Refreshing network info cache for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.812015] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 675.831089] env[61648]: DEBUG nova.scheduler.client.report [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.842962] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 675.842962] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 
tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 675.842962] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.843168] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 675.843168] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.843168] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 675.843168] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 675.843168] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 675.843654] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 675.843975] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 675.844700] env[61648]: DEBUG nova.virt.hardware [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 675.846737] env[61648]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a667fe9-7328-43b9-9023-699fd3e17049 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.859414] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9902fee-23a9-4dfa-896a-f56ef9ad7dfc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.901960] env[61648]: ERROR nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. [ 675.901960] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.901960] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.901960] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.901960] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.901960] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.901960] env[61648]: ERROR nova.compute.manager raise self.value [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.901960] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 675.901960] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.901960] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 675.902477] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.902477] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 675.902477] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. 
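Note on the excutils frames in these tracebacks: the error propagates through oslo_utils.excutils.save_and_reraise_exception() and force_reraise(), which is why the original PortBindingFailed, not a cleanup error, is what reaches the compute manager. A minimal sketch of that oslo.utils pattern in generic form (update_port_or_cleanup and cleanup are illustrative names, not Nova code):

    from oslo_utils import excutils

    def update_port_or_cleanup(update_port, cleanup, port):
        try:
            return update_port(port)
        except Exception:
            # The context manager re-raises the original exception when the
            # with-block exits, so cleanup runs without masking the error.
            with excutils.save_and_reraise_exception():
                cleanup(port)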
[ 675.902477] env[61648]: ERROR nova.compute.manager [ 675.902477] env[61648]: Traceback (most recent call last): [ 675.902477] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 675.902477] env[61648]: listener.cb(fileno) [ 675.902477] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.902477] env[61648]: result = function(*args, **kwargs) [ 675.902477] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.902477] env[61648]: return func(*args, **kwargs) [ 675.902477] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.902477] env[61648]: raise e [ 675.902477] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.902477] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 675.902477] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.902477] env[61648]: created_port_ids = self._update_ports_for_instance( [ 675.902477] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.902477] env[61648]: with excutils.save_and_reraise_exception(): [ 675.902477] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.902477] env[61648]: self.force_reraise() [ 675.902477] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.902477] env[61648]: raise self.value [ 675.902477] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.902477] env[61648]: updated_port = self._update_port( [ 675.902477] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.902477] env[61648]: _ensure_no_port_binding_failure(port) [ 675.902477] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.902477] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 675.903375] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. [ 675.903375] env[61648]: Removing descriptor: 16 [ 675.903375] env[61648]: ERROR nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. 
[ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Traceback (most recent call last): [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] yield resources [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.driver.spawn(context, instance, image_meta, [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 675.903375] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] vm_ref = self.build_virtual_machine(instance, [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] for vif in network_info: [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self._sync_wrapper(fn, *args, **kwargs) [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.wait() [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self[:] = self._gt.wait() [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self._exit_event.wait() [ 675.903730] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 675.904186] env[61648]: ERROR 
nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] result = hub.switch() [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self.greenlet.switch() [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] result = function(*args, **kwargs) [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return func(*args, **kwargs) [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise e [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] nwinfo = self.network_api.allocate_for_instance( [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.904186] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] created_port_ids = self._update_ports_for_instance( [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] with excutils.save_and_reraise_exception(): [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.force_reraise() [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise self.value [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] updated_port = self._update_port( [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.904498] 
env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] _ensure_no_port_binding_failure(port) [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.904498] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise exception.PortBindingFailed(port_id=port['id']) [ 675.904834] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. [ 675.904834] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] [ 675.904834] env[61648]: INFO nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Terminating instance [ 675.905495] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquiring lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.124692] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.172206] env[61648]: DEBUG nova.network.neutron [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.295433] env[61648]: DEBUG nova.network.neutron [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.336319] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.336827] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.340279] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.752s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.345304] env[61648]: INFO nova.compute.claims [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.799694] env[61648]: DEBUG oslo_concurrency.lockutils [req-81d9b0bb-c96f-468d-a32c-0efc64fbe12b req-4dc9a9f5-abac-4c17-921a-8d168a9821f1 service nova] Releasing lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.800144] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquired lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.800392] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.850247] env[61648]: DEBUG nova.compute.utils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.855509] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 676.855509] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.972730] env[61648]: DEBUG nova.policy [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '364b47168dc544dc9f24bf99abd1bf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9fb970ae81348d9b75bc67c353bc8db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 677.329382] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.358601] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.465644] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.630525] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Successfully created port: 9342f0e9-5428-41eb-bd99-b28bcf9c0e45 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.770259] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e956c492-488d-4f59-9961-bf1fb10631c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.777706] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2271f6b0-b851-41b5-9cfb-af770e07c056 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.812644] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b12c282-9fd5-40c8-91bb-e43321d367b5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.821035] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51490f90-2d85-4912-a73f-c472d5692565 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.833753] env[61648]: DEBUG nova.compute.provider_tree [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.839804] env[61648]: DEBUG nova.compute.manager [req-0bd683a5-cf9e-4e96-b40b-ac0ab7f7a7ef req-f692f91f-fd77-442d-800f-56cc8467ed90 service nova] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Received event network-vif-deleted-96a8ab00-8cbd-4641-a7de-ddafd50bfe92 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.968762] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Releasing lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.969206] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 677.969398] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 677.969704] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73c595dd-4fa4-4092-958e-186e6161fe66 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.979976] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb941c2-9205-477f-b74b-4c12a345d174 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.001134] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 33e270fd-0393-4425-8312-1e9fc91f3d1f could not be found. [ 678.001429] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 678.001621] env[61648]: INFO nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 678.001861] env[61648]: DEBUG oslo.service.loopingcall [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 678.002090] env[61648]: DEBUG nova.compute.manager [-] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.002186] env[61648]: DEBUG nova.network.neutron [-] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.031062] env[61648]: DEBUG nova.network.neutron [-] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.336919] env[61648]: DEBUG nova.scheduler.client.report [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.376874] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.410939] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:36:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='dc23abcb-4205-495e-bf74-c104448596b0',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-315914052',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.411207] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.411357] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.411592] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.411816] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
678.412067] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.412296] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.412469] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.412655] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.412892] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.413095] env[61648]: DEBUG nova.virt.hardware [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.414050] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71173c9a-98df-4eaf-a73a-d7f4bc60e777 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.426533] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae2bce4-740c-4e5b-ab3c-83b30b15463a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.534058] env[61648]: DEBUG nova.network.neutron [-] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.843794] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.844339] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 
9008460a-6b35-468d-803c-d10c139494f7] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 678.849376] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.247s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.849473] env[61648]: INFO nova.compute.claims [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.902993] env[61648]: ERROR nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. [ 678.902993] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.902993] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.902993] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.902993] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.902993] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.902993] env[61648]: ERROR nova.compute.manager raise self.value [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.902993] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 678.902993] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.902993] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 678.903483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.903483] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 678.903483] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. 
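The nova.virt.hardware DEBUG lines earlier in this section ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)") enumerate every (sockets, cores, threads) split of the flavor's vCPU count that fits the limits. A small stand-alone sketch of that enumeration follows; it assumes a topology is valid when sockets*cores*threads equals the vCPU count and each factor stays within its limit, and it ignores the flavor/image preferences and NUMA handling the real _get_possible_cpu_topologies also considers.

```python
# Illustrative sketch of the topology enumeration logged by
# nova/virt/hardware.py. The log shows limits of 65536 per dimension
# (effectively unbounded); smaller caps are used here so the brute-force
# loop stays fast.
from itertools import product
from typing import NamedTuple


class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    topologies = []
    for sockets, cores, threads in product(range(1, max_sockets + 1),
                                           range(1, max_cores + 1),
                                           range(1, max_threads + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# For the 1-vCPU flavors in these logs only 1:1:1 survives, matching
# "Got 1 possible topologies".
print(possible_cpu_topologies(1, max_sockets=8, max_cores=8, max_threads=2))
```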
[ 678.903483] env[61648]: ERROR nova.compute.manager [ 678.903483] env[61648]: Traceback (most recent call last): [ 678.903483] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 678.903483] env[61648]: listener.cb(fileno) [ 678.903483] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.903483] env[61648]: result = function(*args, **kwargs) [ 678.903483] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.903483] env[61648]: return func(*args, **kwargs) [ 678.903483] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.903483] env[61648]: raise e [ 678.903483] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.903483] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 678.903483] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.903483] env[61648]: created_port_ids = self._update_ports_for_instance( [ 678.903483] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.903483] env[61648]: with excutils.save_and_reraise_exception(): [ 678.903483] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.903483] env[61648]: self.force_reraise() [ 678.903483] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.903483] env[61648]: raise self.value [ 678.903483] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.903483] env[61648]: updated_port = self._update_port( [ 678.903483] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.903483] env[61648]: _ensure_no_port_binding_failure(port) [ 678.903483] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.903483] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 678.904325] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. [ 678.904325] env[61648]: Removing descriptor: 16 [ 678.904325] env[61648]: ERROR nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. 
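Note that each PortBindingFailed appears twice in these logs: once from the greenthread running _allocate_network_async, and again when the spawn path first iterates network_info (the traceback goes through model.py::_sync_wrapper and wait()), because the network result is computed asynchronously and only joined when consumed. Below is a minimal stdlib analogy of that deferred-failure behaviour, using concurrent.futures in place of eventlet greenthreads purely for illustration; the class and function names are stand-ins, not Nova's API.

```python
# Stdlib analogy (not Nova's code): the network allocation runs in the
# background and its exception only resurfaces when the result is consumed,
# which is why the spawn traceback above passes through _sync_wrapper/wait().
from concurrent.futures import ThreadPoolExecutor


class PortBindingFailed(Exception):
    pass


def allocate_network_async(port_id: str):
    # Stand-in for ComputeManager._allocate_network_async: the binding
    # check fails inside the background task.
    raise PortBindingFailed(f"Binding failed for port {port_id}")


class AsyncNetworkInfo:
    """Stand-in for the async network_info wrapper consumed by get_vif_info."""

    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # Joining the background work here re-raises its exception at the
        # point the VIF list is first needed.
        return iter(self._future.result())


with ThreadPoolExecutor(max_workers=1) as pool:
    nw_info = AsyncNetworkInfo(
        pool.submit(allocate_network_async,
                    '9342f0e9-5428-41eb-bd99-b28bcf9c0e45'))
    try:
        for vif in nw_info:          # spawn path consuming network_info
            pass
    except PortBindingFailed as exc:
        print(f"spawn aborted: {exc}")
```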
[ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Traceback (most recent call last): [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] yield resources [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.driver.spawn(context, instance, image_meta, [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.904325] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] vm_ref = self.build_virtual_machine(instance, [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] for vif in network_info: [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self._sync_wrapper(fn, *args, **kwargs) [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.wait() [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self[:] = self._gt.wait() [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self._exit_event.wait() [ 678.904737] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 678.905148] env[61648]: ERROR 
nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] result = hub.switch() [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self.greenlet.switch() [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] result = function(*args, **kwargs) [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return func(*args, **kwargs) [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise e [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] nwinfo = self.network_api.allocate_for_instance( [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.905148] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] created_port_ids = self._update_ports_for_instance( [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] with excutils.save_and_reraise_exception(): [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.force_reraise() [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise self.value [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] updated_port = self._update_port( [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.905552] 
env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] _ensure_no_port_binding_failure(port) [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.905552] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise exception.PortBindingFailed(port_id=port['id']) [ 678.905905] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. [ 678.905905] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] [ 678.905905] env[61648]: INFO nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Terminating instance [ 678.905905] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.905905] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquired lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.905905] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.036870] env[61648]: INFO nova.compute.manager [-] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Took 1.03 seconds to deallocate network for instance. 
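After each failed spawn the log shows the same cleanup order: destroy the instance on the hypervisor (tolerating InstanceNotFound, since the VM was never actually created), deallocate the Neutron resources, then abort the resource-tracker claim under the "compute_resources" lock. A condensed, illustrative sketch of that ordering follows; the helper functions are hypothetical stand-ins for the real ComputeManager, vmops and ResourceTracker calls.

```python
# Condensed, illustrative cleanup flow matching the log ordering above.
# Helper names are hypothetical stand-ins, not Nova's API.
import threading


class InstanceNotFound(Exception):
    pass


compute_resources_lock = threading.Lock()   # cf. the "compute_resources" lock


def destroy_on_hypervisor(instance_uuid: str) -> None:
    # The VM was never built, so the backend lookup fails; Nova logs a
    # WARNING and treats the instance as already destroyed.
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def deallocate_network(instance_uuid: str) -> None:
    print(f"Deallocating network for instance {instance_uuid}")


def abort_claim(instance_uuid: str) -> None:
    with compute_resources_lock:   # ResourceTracker.abort_instance_claim
        print(f"Aborting claim for instance {instance_uuid}")


def cleanup_failed_spawn(instance_uuid: str) -> None:
    try:
        destroy_on_hypervisor(instance_uuid)
    except InstanceNotFound as exc:
        print(f"WARNING: Instance does not exist on backend: {exc}")
    deallocate_network(instance_uuid)
    abort_claim(instance_uuid)


cleanup_failed_spawn('33e270fd-0393-4425-8312-1e9fc91f3d1f')
```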
[ 679.039667] env[61648]: DEBUG nova.compute.claims [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 679.039866] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.354252] env[61648]: DEBUG nova.compute.utils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.357708] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 679.357905] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.402643] env[61648]: DEBUG nova.policy [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e56b8866585c4c80b9269b3b9f41fd30', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46521bd6b495431b9a2f4a79f62a6a2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 679.426524] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.527392] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.680296] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Successfully created port: 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.862022] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 679.888467] env[61648]: DEBUG nova.compute.manager [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Received event network-changed-9342f0e9-5428-41eb-bd99-b28bcf9c0e45 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.892219] env[61648]: DEBUG nova.compute.manager [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Refreshing instance network info cache due to event network-changed-9342f0e9-5428-41eb-bd99-b28bcf9c0e45. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 679.892727] env[61648]: DEBUG oslo_concurrency.lockutils [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] Acquiring lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.033901] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Releasing lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.034500] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 680.034764] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 680.036237] env[61648]: DEBUG oslo_concurrency.lockutils [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] Acquired lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.036237] env[61648]: DEBUG nova.network.neutron [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Refreshing network info cache for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.037141] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eecaed3d-ed0b-4d8c-a0c4-3a52854a6ca4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.048941] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d6e45f-65ff-400c-8902-81ff78053199 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.075571] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be7776f1-0083-4c40-a7e6-477c0c65f7bf could not be found. [ 680.075810] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.076024] env[61648]: INFO nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 680.076237] env[61648]: DEBUG oslo.service.loopingcall [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.078922] env[61648]: DEBUG nova.compute.manager [-] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.079076] env[61648]: DEBUG nova.network.neutron [-] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.179877] env[61648]: DEBUG nova.network.neutron [-] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.331279] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fae2d2d-fd66-4e41-a6b8-09da6653dbe4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.339388] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e4e721-162d-45a3-a264-63261da13544 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.379973] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64039e2f-085a-45f3-ad38-90149dd0c1b4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.387653] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c961514f-5797-45fa-bfed-bab2ca470d24 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.402028] env[61648]: DEBUG nova.compute.provider_tree [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.576581] env[61648]: DEBUG nova.network.neutron [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.677960] env[61648]: DEBUG nova.network.neutron [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.686032] env[61648]: DEBUG nova.network.neutron [-] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.772010] env[61648]: ERROR nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. [ 680.772010] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.772010] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.772010] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.772010] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.772010] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.772010] env[61648]: ERROR nova.compute.manager raise self.value [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.772010] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 680.772010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.772010] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 680.772433] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.772433] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 680.772433] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. 
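The nova.policy DEBUG records above show the network:attach_external_network check failing for these tempest credentials, which carry only the 'reader' and 'member' roles with is_admin False. Assuming the default rule for that policy requires an admin context (the upstream default, though deployments can override it), the outcome follows directly from the logged credential dict; the toy check below illustrates that evaluation and is not how oslo.policy actually parses rules.

```python
# Toy policy evaluation against the credential dict from the log above.
# Assumption: network:attach_external_network defaults to an admin-only
# rule; real deployments evaluate oslo.policy rule strings, not this check.

def check_attach_external_network(credentials: dict) -> bool:
    return (credentials.get('is_admin', False)
            or 'admin' in credentials.get('roles', []))


creds = {
    'is_admin': False,
    'user_id': 'e56b8866585c4c80b9269b3b9f41fd30',
    'project_id': '46521bd6b495431b9a2f4a79f62a6a2f',
    'roles': ['reader', 'member'],
}
print(check_attach_external_network(creds))   # False -> "Policy check ... failed"
```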
[ 680.772433] env[61648]: ERROR nova.compute.manager [ 680.772433] env[61648]: Traceback (most recent call last): [ 680.772433] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 680.772433] env[61648]: listener.cb(fileno) [ 680.772433] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.772433] env[61648]: result = function(*args, **kwargs) [ 680.772433] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.772433] env[61648]: return func(*args, **kwargs) [ 680.772433] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.772433] env[61648]: raise e [ 680.772433] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.772433] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 680.772433] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.772433] env[61648]: created_port_ids = self._update_ports_for_instance( [ 680.772433] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.772433] env[61648]: with excutils.save_and_reraise_exception(): [ 680.772433] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.772433] env[61648]: self.force_reraise() [ 680.772433] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.772433] env[61648]: raise self.value [ 680.772433] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.772433] env[61648]: updated_port = self._update_port( [ 680.772433] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.772433] env[61648]: _ensure_no_port_binding_failure(port) [ 680.772433] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.772433] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 680.773278] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. [ 680.773278] env[61648]: Removing descriptor: 16 [ 680.884177] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 680.907111] env[61648]: DEBUG nova.scheduler.client.report [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.915884] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 680.916136] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 680.916291] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.916471] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 680.916613] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.916758] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 680.916961] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 680.917703] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 680.917703] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 680.917703] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 680.917703] env[61648]: DEBUG nova.virt.hardware [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 680.919364] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a187580-f0ff-4f49-841a-63dcf12c2344 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.929465] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9896f999-58d0-4e5a-8f0e-cdba5393f527 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.946380] env[61648]: ERROR nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. 
[ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] Traceback (most recent call last): [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] yield resources [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.driver.spawn(context, instance, image_meta, [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] vm_ref = self.build_virtual_machine(instance, [ 680.946380] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] for vif in network_info: [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return self._sync_wrapper(fn, *args, **kwargs) [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.wait() [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self[:] = self._gt.wait() [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return self._exit_event.wait() [ 680.947146] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 680.947146] env[61648]: ERROR 
nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] current.throw(*self._exc) [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] result = function(*args, **kwargs) [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return func(*args, **kwargs) [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise e [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] nwinfo = self.network_api.allocate_for_instance( [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] created_port_ids = self._update_ports_for_instance( [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] with excutils.save_and_reraise_exception(): [ 680.947632] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.force_reraise() [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise self.value [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] updated_port = self._update_port( [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] _ensure_no_port_binding_failure(port) [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise exception.PortBindingFailed(port_id=port['id']) [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. [ 680.951129] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] [ 680.951129] env[61648]: INFO nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Terminating instance [ 680.951569] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquiring lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.951569] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquired lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.951569] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.187285] env[61648]: DEBUG oslo_concurrency.lockutils [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] Releasing lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.187285] env[61648]: DEBUG nova.compute.manager [req-e629d316-7e13-4a1f-9005-56b7d41db698 req-59b92ab6-5ec4-48e8-a986-a5774b6c79a0 service nova] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Received event network-vif-deleted-9342f0e9-5428-41eb-bd99-b28bcf9c0e45 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.191527] env[61648]: INFO nova.compute.manager [-] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Took 1.11 seconds to deallocate network for instance. 
[ 681.198103] env[61648]: DEBUG nova.compute.claims [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 681.198303] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.411641] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.412267] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 681.414816] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.974s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.475655] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.577474] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.912474] env[61648]: DEBUG nova.compute.manager [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Received event network-changed-83dc9ed5-d8bf-41ea-a443-d86b41c7ee09 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.912474] env[61648]: DEBUG nova.compute.manager [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Refreshing instance network info cache due to event network-changed-83dc9ed5-d8bf-41ea-a443-d86b41c7ee09. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 681.912474] env[61648]: DEBUG oslo_concurrency.lockutils [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] Acquiring lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.919768] env[61648]: DEBUG nova.compute.utils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.928306] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Not allocating networking since 'none' was specified. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 682.087022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Releasing lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.087022] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 682.087022] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.087022] env[61648]: DEBUG oslo_concurrency.lockutils [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] Acquired lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.087022] env[61648]: DEBUG nova.network.neutron [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Refreshing network info cache for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 682.087510] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df9a61c0-ba48-4e2b-bb7d-9d1adb9acbea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.099132] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d019cb6-67bb-4571-b191-98be9a11f935 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.124404] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9008460a-6b35-468d-803c-d10c139494f7 could not be found. [ 682.124682] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 682.124899] env[61648]: INFO nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 682.125193] env[61648]: DEBUG oslo.service.loopingcall [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.125501] env[61648]: DEBUG nova.compute.manager [-] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.125556] env[61648]: DEBUG nova.network.neutron [-] [instance: 9008460a-6b35-468d-803c-d10c139494f7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.152253] env[61648]: DEBUG nova.network.neutron [-] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.394140] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57f1d34-32a9-47d0-acb8-4adeac74ac63 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.402305] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bdc2fb-0ae9-4879-b73b-fae1785c0559 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.437074] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 682.441449] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b0ba75-8452-48bd-9ac8-7405b78de846 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.450228] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a5c6cb-f829-497f-be59-afadf92b78d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.467139] env[61648]: DEBUG nova.compute.provider_tree [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.620383] env[61648]: DEBUG nova.network.neutron [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.654649] env[61648]: DEBUG nova.network.neutron [-] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.704975] env[61648]: DEBUG nova.network.neutron [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.973605] env[61648]: DEBUG nova.scheduler.client.report [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.157369] env[61648]: INFO nova.compute.manager [-] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Took 1.03 seconds to deallocate network for instance. [ 683.160119] env[61648]: DEBUG nova.compute.claims [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 683.160171] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.210929] env[61648]: DEBUG oslo_concurrency.lockutils [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] Releasing lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.211228] env[61648]: DEBUG nova.compute.manager [req-a7bff81e-084c-45a0-9ce0-e3635360920b req-4a1d48ab-28d2-414c-bc71-3106be87d5f3 service nova] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Received event network-vif-deleted-83dc9ed5-d8bf-41ea-a443-d86b41c7ee09 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 683.449130] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 683.479314] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.479578] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.479753] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.479935] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.480089] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.480236] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.480440] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.480595] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.480758] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 
tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.480914] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.482748] env[61648]: DEBUG nova.virt.hardware [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.483506] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.069s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.484115] env[61648]: ERROR nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. 
[ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Traceback (most recent call last): [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.driver.spawn(context, instance, image_meta, [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] vm_ref = self.build_virtual_machine(instance, [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] vif_infos = vmwarevif.get_vif_info(self._session, [ 683.484115] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] for vif in network_info: [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self._sync_wrapper(fn, *args, **kwargs) [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.wait() [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self[:] = self._gt.wait() [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self._exit_event.wait() [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] result = hub.switch() [ 683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
683.484467] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return self.greenlet.switch() [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] result = function(*args, **kwargs) [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] return func(*args, **kwargs) [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise e [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] nwinfo = self.network_api.allocate_for_instance( [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] created_port_ids = self._update_ports_for_instance( [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] with excutils.save_and_reraise_exception(): [ 683.484841] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] self.force_reraise() [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise self.value [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] updated_port = self._update_port( [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] _ensure_no_port_binding_failure(port) [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] raise exception.PortBindingFailed(port_id=port['id']) [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] nova.exception.PortBindingFailed: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. [ 683.485249] env[61648]: ERROR nova.compute.manager [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] [ 683.485575] env[61648]: DEBUG nova.compute.utils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 683.488908] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e043472c-e6b4-4924-ac61-f8eb5b7f8df2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.496819] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Build of instance 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c was re-scheduled: Binding failed for port 57f36687-144b-4979-9033-2ba72b84fdc2, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 683.497299] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 683.497529] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.497676] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquired lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.497830] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.498899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.668s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.500337] env[61648]: INFO nova.compute.claims [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.509752] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e31472-4274-42e5-9955-b6a6978ed2f0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.531438] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.537688] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Creating folder: Project (07dd79e05c844c43838122df39afb2d0). Parent ref: group-v285225. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.540683] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3499e309-3ad6-4f8a-b094-63e0f686e1c3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.550105] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Created folder: Project (07dd79e05c844c43838122df39afb2d0) in parent group-v285225. [ 683.550303] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Creating folder: Instances. Parent ref: group-v285237. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.551136] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d304a98-7047-4af7-8c44-a6410df7bf12 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.560181] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Created folder: Instances in parent group-v285237. [ 683.560671] env[61648]: DEBUG oslo.service.loopingcall [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.560671] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.560788] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f867cb90-92be-441a-9aa9-072fc80a3f8e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.577726] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.577726] env[61648]: value = "task-1336627" [ 683.577726] env[61648]: _type = "Task" [ 683.577726] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.585135] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336627, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.026745] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.087503] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336627, 'name': CreateVM_Task, 'duration_secs': 0.249389} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.087680] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 684.088121] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.088282] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.088591] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 684.088839] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbee4352-0444-4f3e-9fc0-fd3466184260 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.093504] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 
tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 684.093504] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e8c185-0a71-8b64-3bd4-742fb1fb43ca" [ 684.093504] env[61648]: _type = "Task" [ 684.093504] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.101330] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e8c185-0a71-8b64-3bd4-742fb1fb43ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.119824] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.604327] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e8c185-0a71-8b64-3bd4-742fb1fb43ca, 'name': SearchDatastore_Task, 'duration_secs': 0.01173} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.605772] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.605772] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.605772] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.605772] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.606100] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 
tempest-ServersAdmin275Test-1494233708-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.611127] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3657b325-7a24-4596-9350-d06ec924b9bb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.616782] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.616978] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.618331] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb33769-779a-47a5-835e-b0940aec2558 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.623292] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Releasing lock "refresh_cache-376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.623518] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 684.623721] env[61648]: DEBUG nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.623904] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.627723] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 684.627723] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f65001-40a3-95ef-69f2-78aeb0cf0660" [ 684.627723] env[61648]: _type = "Task" [ 684.627723] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.636279] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f65001-40a3-95ef-69f2-78aeb0cf0660, 'name': SearchDatastore_Task, 'duration_secs': 0.008702} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.639742] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fdbd17f-6009-47ce-87c4-a2fd9795f6f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.644988] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 684.644988] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]521e4bc9-fecc-a194-bd10-9eccabfda017" [ 684.644988] env[61648]: _type = "Task" [ 684.644988] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.649294] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.653238] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]521e4bc9-fecc-a194-bd10-9eccabfda017, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.920204] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193d55e3-f3a2-495c-a9d4-93636b3bf083 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.928360] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3407e8-5699-44a7-aa2d-867f0493aae2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.960467] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2633a9cb-2ccc-4f97-9e7a-11e6a1c5ae33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.965203] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453a26d8-6bed-4bd6-a516-dca6e296cca8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.979181] env[61648]: DEBUG nova.compute.provider_tree [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.155336] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]521e4bc9-fecc-a194-bd10-9eccabfda017, 'name': SearchDatastore_Task, 'duration_secs': 0.008196} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.155606] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.155866] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.156251] env[61648]: DEBUG nova.network.neutron [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.158122] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5b86631-e09c-424c-ac29-94910e9de0a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.165216] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 685.165216] env[61648]: value = "task-1336628" [ 685.165216] env[61648]: _type = "Task" [ 685.165216] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.174153] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336628, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.483057] env[61648]: DEBUG nova.scheduler.client.report [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 685.660894] env[61648]: INFO nova.compute.manager [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c] Took 1.04 seconds to deallocate network for instance. [ 685.676871] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465312} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.684019] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.684019] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.684019] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c3a4365-85bb-427c-9ac7-2475d0f50bd0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.689037] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 685.689037] env[61648]: value = "task-1336629" [ 685.689037] env[61648]: _type = "Task" [ 685.689037] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.697765] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336629, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.990969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.991471] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 685.996784] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.260s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.998223] env[61648]: INFO nova.compute.claims [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.202019] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059591} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.202019] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.202019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1883558e-f50b-4d28-b72e-bc9f729e932f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.224148] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.224849] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d04842fd-e08c-41c0-8177-fea175e3dc36 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.247654] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 686.247654] env[61648]: value = "task-1336630" [ 686.247654] env[61648]: _type = "Task" [ 686.247654] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.260515] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336630, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.498860] env[61648]: DEBUG nova.compute.utils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 686.500309] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 686.500479] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.542736] env[61648]: DEBUG nova.policy [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '891a7ca87eed4f9eb8109e27d0270b93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa6eca973cdb458eaa7eec06767c17ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 686.694179] env[61648]: INFO nova.scheduler.client.report [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Deleted allocations for instance 376f4b46-b5e1-428d-ae7a-c4d9596c0a4c [ 686.756489] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336630, 'name': ReconfigVM_Task, 'duration_secs': 0.320626} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.756816] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.757431] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aee20579-dafc-415f-9799-0d87fb9f0f89 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.763688] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 686.763688] env[61648]: value = "task-1336631" [ 686.763688] env[61648]: _type = "Task" [ 686.763688] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.774982] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336631, 'name': Rename_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.850307] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Successfully created port: b1324f39-8021-4a1e-8858-95e8d192d6b3 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.004463] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 687.204696] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b71a04c8-a2aa-45d1-bf55-bf69c00f41b7 tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "376f4b46-b5e1-428d-ae7a-c4d9596c0a4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.820s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.276072] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336631, 'name': Rename_Task, 'duration_secs': 0.12533} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.278516] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.278985] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21aa0d76-9177-48a1-b5fd-ef759c43d305 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.285514] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 687.285514] env[61648]: value = "task-1336632" [ 687.285514] env[61648]: _type = "Task" [ 687.285514] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.293658] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336632, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.447192] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94d030e-d759-45b3-ab87-65e00b2d4ef8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.452869] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7040906f-00f7-40ae-8410-b0c2c939043b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.481967] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0045da-b54e-4214-a11e-69a17b5fe455 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.489737] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8e41b2-005e-43a2-899a-0c93e519f251 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.507916] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 687.707276] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 687.717223] env[61648]: DEBUG nova.compute.manager [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Received event network-changed-b1324f39-8021-4a1e-8858-95e8d192d6b3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 687.720252] env[61648]: DEBUG nova.compute.manager [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Refreshing instance network info cache due to event network-changed-b1324f39-8021-4a1e-8858-95e8d192d6b3. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 687.720252] env[61648]: DEBUG oslo_concurrency.lockutils [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] Acquiring lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.720252] env[61648]: DEBUG oslo_concurrency.lockutils [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] Acquired lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.720252] env[61648]: DEBUG nova.network.neutron [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Refreshing network info cache for port b1324f39-8021-4a1e-8858-95e8d192d6b3 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.794900] env[61648]: DEBUG oslo_vmware.api [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336632, 'name': PowerOnVM_Task, 'duration_secs': 0.413829} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.795181] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.795370] env[61648]: INFO nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Took 4.35 seconds to spawn the instance on the hypervisor. [ 687.795539] env[61648]: DEBUG nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 687.796356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f583b42d-ed33-4ba9-93ff-4fab4b541394 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.914483] env[61648]: ERROR nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. 
[ 687.914483] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.914483] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.914483] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.914483] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.914483] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.914483] env[61648]: ERROR nova.compute.manager raise self.value [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.914483] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 687.914483] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.914483] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 687.915202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.915202] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 687.915202] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. 
[ 687.915202] env[61648]: ERROR nova.compute.manager [ 687.915202] env[61648]: Traceback (most recent call last): [ 687.915202] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 687.915202] env[61648]: listener.cb(fileno) [ 687.915202] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.915202] env[61648]: result = function(*args, **kwargs) [ 687.915202] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.915202] env[61648]: return func(*args, **kwargs) [ 687.915202] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.915202] env[61648]: raise e [ 687.915202] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.915202] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 687.915202] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.915202] env[61648]: created_port_ids = self._update_ports_for_instance( [ 687.915202] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.915202] env[61648]: with excutils.save_and_reraise_exception(): [ 687.915202] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.915202] env[61648]: self.force_reraise() [ 687.915202] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.915202] env[61648]: raise self.value [ 687.915202] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.915202] env[61648]: updated_port = self._update_port( [ 687.915202] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.915202] env[61648]: _ensure_no_port_binding_failure(port) [ 687.915202] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.915202] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 687.916443] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. [ 687.916443] env[61648]: Removing descriptor: 16 [ 688.016199] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 688.027105] env[61648]: ERROR nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [req-15573473-20fe-45fd-886c-89cdd4a08c5f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-15573473-20fe-45fd-886c-89cdd4a08c5f"}]} [ 688.047019] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 688.047019] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 688.047019] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.047019] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 688.047341] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.047341] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 688.047341] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 688.047341] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 688.047341] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 688.047489] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 688.047489] env[61648]: DEBUG nova.virt.hardware [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.047489] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba4ffbb-05b4-40db-a073-fc3df99b330b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.050524] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 688.058884] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea0e20c-a6f5-445a-a8d6-42690e6216f0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.071555] env[61648]: ERROR nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. 
[ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Traceback (most recent call last): [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] yield resources [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.driver.spawn(context, instance, image_meta, [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] vm_ref = self.build_virtual_machine(instance, [ 688.071555] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] vif_infos = vmwarevif.get_vif_info(self._session, [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] for vif in network_info: [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return self._sync_wrapper(fn, *args, **kwargs) [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.wait() [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self[:] = self._gt.wait() [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return self._exit_event.wait() [ 688.071917] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 688.071917] env[61648]: ERROR 
nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] current.throw(*self._exc) [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] result = function(*args, **kwargs) [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return func(*args, **kwargs) [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise e [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] nwinfo = self.network_api.allocate_for_instance( [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] created_port_ids = self._update_ports_for_instance( [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] with excutils.save_and_reraise_exception(): [ 688.072295] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.force_reraise() [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise self.value [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] updated_port = self._update_port( [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] _ensure_no_port_binding_failure(port) [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise exception.PortBindingFailed(port_id=port['id']) [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. [ 688.072631] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] [ 688.072631] env[61648]: INFO nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Terminating instance [ 688.075646] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.077142] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 688.077710] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.092929] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 688.116883] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:839}} [ 688.239030] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.311448] env[61648]: INFO nova.compute.manager [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Took 41.73 seconds to build instance. [ 688.362789] env[61648]: DEBUG nova.network.neutron [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.451688] env[61648]: DEBUG nova.network.neutron [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.518992] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8434c4-37bb-466f-ab03-3890c6f17dce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.526712] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818472b2-523b-4c17-b683-ec005491a0d1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.559748] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b197c20-6bfb-4749-8d41-6652d681873b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.571045] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c790e5d5-d964-446e-a9b3-acaa854d9630 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.584527] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.815487] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4fe0a0a7-44dd-4726-a8a6-1d2f7b6c83a2 tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.569s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.954796] env[61648]: DEBUG oslo_concurrency.lockutils [req-4902f260-f2e0-4a95-bac9-1d818bccbe9f req-08d69cf6-dbbc-4cab-a5eb-609d39702cbc service nova] Releasing lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.955278] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquired lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.955446] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.067058] env[61648]: INFO nova.compute.manager [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Rebuilding instance [ 689.121499] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 689.121765] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 71 to 72 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 689.121999] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 689.135977] env[61648]: DEBUG nova.compute.manager [None 
req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 689.137064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d2ac10-8547-4b15-988d-d8cd314ff179 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.320624] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 689.473862] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.544243] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.627084] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.630s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.627585] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 689.631242] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.326s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.647268] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.647542] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e27a44e-3155-472e-9bd0-4feb75a1431b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.655576] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 689.655576] env[61648]: value = "task-1336633" [ 689.655576] env[61648]: _type = "Task" [ 689.655576] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.664870] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.778484] env[61648]: DEBUG nova.compute.manager [req-dae6bc3d-bb20-4c12-a2ba-64bf0e3caecf req-105393d6-6c12-4d6d-97a6-7def0e5cb545 service nova] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Received event network-vif-deleted-b1324f39-8021-4a1e-8858-95e8d192d6b3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 689.846859] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.049021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Releasing lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.049021] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 690.049021] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.049021] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eef90f49-0521-4fdc-ab7a-bc35a863403d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.056283] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922f6ca3-c2dc-44a8-8aac-78b55614c45f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.078752] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 458302d3-123c-47e8-bee8-6fe1462d5f4b could not be found. [ 690.078971] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.079159] env[61648]: INFO nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 690.079397] env[61648]: DEBUG oslo.service.loopingcall [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.079616] env[61648]: DEBUG nova.compute.manager [-] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 690.079703] env[61648]: DEBUG nova.network.neutron [-] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 690.112277] env[61648]: DEBUG nova.network.neutron [-] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.135912] env[61648]: DEBUG nova.compute.utils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.140074] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 690.140236] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.169098] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336633, 'name': PowerOffVM_Task, 'duration_secs': 0.108631} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.173996] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 690.173996] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.173996] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727224e-c5f0-498a-83e2-e034c533b03e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.179814] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 690.180143] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48b34d8d-5b79-478a-9adb-12e946205ee1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.203113] env[61648]: DEBUG nova.policy [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '891a7ca87eed4f9eb8109e27d0270b93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa6eca973cdb458eaa7eec06767c17ed', 'project_domain_id': 
'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 690.205858] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 690.206061] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 690.206233] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Deleting the datastore file [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 690.206466] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-839e86ad-6783-4d61-bf0c-072ba0c8f694 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.212760] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 690.212760] env[61648]: value = "task-1336635" [ 690.212760] env[61648]: _type = "Task" [ 690.212760] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.224400] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336635, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.562526] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Successfully created port: cac25edb-f1d6-467a-ac0a-cb9f4f5f442d {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.567505] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397cf350-e980-4b95-be38-4782fd082f4f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.578044] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2b11a6-bd78-44a0-8a44-273b69aa49ff {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.609024] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac0f38c-14ba-4a10-b879-c0143cde1c36 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.616010] env[61648]: DEBUG nova.network.neutron [-] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.618322] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0480e3b-89c3-4d05-95dd-7922e7dc6f20 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.631659] env[61648]: DEBUG nova.compute.provider_tree [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.640768] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 690.727306] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107026} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.727727] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.728043] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.728328] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 691.123383] env[61648]: INFO nova.compute.manager [-] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Took 1.04 seconds to deallocate network for instance. [ 691.125722] env[61648]: DEBUG nova.compute.claims [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 691.125919] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.134872] env[61648]: DEBUG nova.scheduler.client.report [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 691.537490] env[61648]: ERROR nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. 
[ 691.537490] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.537490] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.537490] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.537490] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.537490] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.537490] env[61648]: ERROR nova.compute.manager raise self.value [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.537490] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 691.537490] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.537490] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 691.537976] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.537976] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 691.537976] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. 
[ 691.537976] env[61648]: ERROR nova.compute.manager [ 691.537976] env[61648]: Traceback (most recent call last): [ 691.537976] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 691.537976] env[61648]: listener.cb(fileno) [ 691.537976] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.537976] env[61648]: result = function(*args, **kwargs) [ 691.537976] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.537976] env[61648]: return func(*args, **kwargs) [ 691.537976] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.537976] env[61648]: raise e [ 691.537976] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.537976] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 691.537976] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.537976] env[61648]: created_port_ids = self._update_ports_for_instance( [ 691.537976] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.537976] env[61648]: with excutils.save_and_reraise_exception(): [ 691.537976] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.537976] env[61648]: self.force_reraise() [ 691.537976] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.537976] env[61648]: raise self.value [ 691.537976] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.537976] env[61648]: updated_port = self._update_port( [ 691.537976] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.537976] env[61648]: _ensure_no_port_binding_failure(port) [ 691.537976] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.537976] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 691.538825] env[61648]: nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. [ 691.538825] env[61648]: Removing descriptor: 16 [ 691.642998] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.012s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.643659] env[61648]: ERROR nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. 
[ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Traceback (most recent call last): [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.driver.spawn(context, instance, image_meta, [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] vm_ref = self.build_virtual_machine(instance, [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.643659] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] for vif in network_info: [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self._sync_wrapper(fn, *args, **kwargs) [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.wait() [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self[:] = self._gt.wait() [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self._exit_event.wait() [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] result = hub.switch() [ 691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
691.643973] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return self.greenlet.switch() [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] result = function(*args, **kwargs) [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] return func(*args, **kwargs) [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise e [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] nwinfo = self.network_api.allocate_for_instance( [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] created_port_ids = self._update_ports_for_instance( [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] with excutils.save_and_reraise_exception(): [ 691.644304] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] self.force_reraise() [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise self.value [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] updated_port = self._update_port( [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] _ensure_no_port_binding_failure(port) [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] raise exception.PortBindingFailed(port_id=port['id']) [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] nova.exception.PortBindingFailed: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. [ 691.644617] env[61648]: ERROR nova.compute.manager [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] [ 691.644943] env[61648]: DEBUG nova.compute.utils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 691.645590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.951s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.653440] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Build of instance 8c8a339c-e52a-4257-9191-4e03ecf87b22 was re-scheduled: Binding failed for port b73a1922-a46f-4870-be29-d33b5b919064, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 691.653962] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 691.654303] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.654512] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.654726] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.656349] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 691.683125] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.696057] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 691.696314] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 691.696467] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.696655] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 691.696798] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.696943] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 691.697444] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 691.697698] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 691.697936] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee 
tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 691.698175] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 691.698559] env[61648]: DEBUG nova.virt.hardware [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 691.700917] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d7ccd9-94fa-4ba5-8a1d-b0a977b05fa2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.709927] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebb8298-48e7-49f6-8310-e5824c14443d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.724962] env[61648]: ERROR nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. 
[ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Traceback (most recent call last): [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] yield resources [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.driver.spawn(context, instance, image_meta, [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] vm_ref = self.build_virtual_machine(instance, [ 691.724962] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] for vif in network_info: [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return self._sync_wrapper(fn, *args, **kwargs) [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.wait() [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self[:] = self._gt.wait() [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return self._exit_event.wait() [ 691.725332] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 691.725332] env[61648]: ERROR 
nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] current.throw(*self._exc) [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] result = function(*args, **kwargs) [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return func(*args, **kwargs) [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise e [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] nwinfo = self.network_api.allocate_for_instance( [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] created_port_ids = self._update_ports_for_instance( [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] with excutils.save_and_reraise_exception(): [ 691.725750] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.force_reraise() [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise self.value [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] updated_port = self._update_port( [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] _ensure_no_port_binding_failure(port) [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise exception.PortBindingFailed(port_id=port['id']) [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. [ 691.726108] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] [ 691.726108] env[61648]: INFO nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Terminating instance [ 691.729967] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.730230] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquired lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.730450] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.758808] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 691.759057] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 691.759216] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
691.759392] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 691.759534] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.759678] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 691.759881] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 691.760185] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 691.760441] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 691.760666] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 691.760897] env[61648]: DEBUG nova.virt.hardware [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 691.761791] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17632fe-205a-4d93-a7d4-f702187cef02 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.769324] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a3fe17-8fb6-4449-90a0-1a03e31c2d61 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.774115] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Updating instance_info_cache with network_info: [] {{(pid=61648) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.787486] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.793143] env[61648]: DEBUG oslo.service.loopingcall [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.794395] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-8c8a339c-e52a-4257-9191-4e03ecf87b22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.794670] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 691.794903] env[61648]: DEBUG nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 691.795147] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 691.796836] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 691.797390] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc9a9109-414b-4999-97ec-23f38148ad7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.817872] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.817872] env[61648]: value = "task-1336636" [ 691.817872] env[61648]: _type = "Task" [ 691.817872] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.826263] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336636, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.827104] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.847493] env[61648]: DEBUG nova.compute.manager [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Received event network-changed-cac25edb-f1d6-467a-ac0a-cb9f4f5f442d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 691.847672] env[61648]: DEBUG nova.compute.manager [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Refreshing instance network info cache due to event network-changed-cac25edb-f1d6-467a-ac0a-cb9f4f5f442d. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 691.847888] env[61648]: DEBUG oslo_concurrency.lockutils [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] Acquiring lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.256210] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.328570] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336636, 'name': CreateVM_Task, 'duration_secs': 0.266343} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.334018] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.334018] env[61648]: DEBUG nova.network.neutron [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.335311] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.335461] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.335800] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 692.336267] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-184f8867-32f7-4f63-b343-4d9febdf7e78 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.341727] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 692.341727] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5231e823-96d6-592b-6506-ff98e445291f" [ 692.341727] env[61648]: _type = "Task" [ 692.341727] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.357732] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5231e823-96d6-592b-6506-ff98e445291f, 'name': SearchDatastore_Task, 'duration_secs': 0.008343} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.358014] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.358248] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.358456] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.358589] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.358750] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.358999] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13f98576-5091-4ffe-ad8d-c51bf47afc40 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.368888] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.368888] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.371143] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a09f3f-86be-42bc-a51b-8bfbe5dd1b9c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.378233] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 692.378233] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]521ac3e5-5790-6177-08c8-c6b184c6f1a2" [ 692.378233] env[61648]: _type = "Task" [ 692.378233] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.379029] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.390910] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]521ac3e5-5790-6177-08c8-c6b184c6f1a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.603998] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9c834a-f171-4ef7-be45-6de098c2153e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.610854] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de13da7e-fca6-47e0-9f75-87c4cff6c160 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.639692] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf354cd-f36d-4eff-bcbf-d1e465474c76 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.647026] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92eba35d-bacf-405f-a2d6-90a71b616aea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.659158] env[61648]: DEBUG nova.compute.provider_tree [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.839070] env[61648]: INFO nova.compute.manager [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 8c8a339c-e52a-4257-9191-4e03ecf87b22] Took 1.04 seconds to deallocate network for instance. [ 692.885522] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Releasing lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.886090] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 692.886294] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.886658] env[61648]: DEBUG oslo_concurrency.lockutils [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] Acquired lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.886877] env[61648]: DEBUG nova.network.neutron [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Refreshing network info cache for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.887936] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e30d2612-095b-484d-af1f-ea42f57a8c0c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.897037] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]521ac3e5-5790-6177-08c8-c6b184c6f1a2, 'name': SearchDatastore_Task, 'duration_secs': 0.00803} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.898543] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82237303-0452-42b0-8d2d-de964a1993fb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.904435] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1179b8a3-ee0e-4e85-b046-66b749fa61e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.921607] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 692.921607] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52386d7a-94bb-4942-599c-f30965f08483" [ 692.921607] env[61648]: _type = "Task" [ 692.921607] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.931985] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46672a70-7d6b-4a86-833b-a7583c71e595 could not be found. 
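The destroy sequence above ends with a WARNING because the backing VM is already gone from vCenter, yet the flow continues to "Instance destroyed" and on to network deallocation. A minimal Python sketch of that tolerate-missing-backend pattern follows; it is not the actual nova.virt.vmwareapi.vmops code, and the backend helpers named here are hypothetical.

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_on_hypervisor(backend, instance_uuid, log):
    # Mirrors the log above: a missing backend VM is logged as a warning,
    # not re-raised, so cleanup (deallocate network, abort claim) still runs.
    try:
        vm_ref = backend.lookup_vm(instance_uuid)      # hypothetical helper
        backend.unregister_and_delete(vm_ref)          # hypothetical helper
    except InstanceNotFound:
        log.warning("Instance does not exist on backend: %s", instance_uuid)
    log.debug("Instance destroyed")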
[ 692.932217] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 692.932415] env[61648]: INFO nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Took 0.05 seconds to destroy the instance on the hypervisor. [ 692.932649] env[61648]: DEBUG oslo.service.loopingcall [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.933728] env[61648]: DEBUG nova.compute.manager [-] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 692.933728] env[61648]: DEBUG nova.network.neutron [-] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.940926] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52386d7a-94bb-4942-599c-f30965f08483, 'name': SearchDatastore_Task, 'duration_secs': 0.008372} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.940926] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.940926] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 692.940926] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d58f8a4a-81d3-4315-b5fa-3d114fee14d5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.945389] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 692.945389] env[61648]: value = "task-1336637" [ 692.945389] env[61648]: _type = "Task" [ 692.945389] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.954057] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336637, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.963192] env[61648]: DEBUG nova.network.neutron [-] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.162686] env[61648]: DEBUG nova.scheduler.client.report [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 693.409109] env[61648]: DEBUG nova.network.neutron [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.455125] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336637, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473128} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.455383] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 693.455583] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 693.455892] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6663fc48-6bd1-4e95-8ec4-476600d760d6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.462956] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 693.462956] env[61648]: value = "task-1336638" [ 693.462956] env[61648]: _type = "Task" [ 693.462956] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.466459] env[61648]: DEBUG nova.network.neutron [-] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.470274] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336638, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.490296] env[61648]: DEBUG nova.network.neutron [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.668435] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.023s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.669170] env[61648]: ERROR nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Traceback (most recent call last): [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.driver.spawn(context, instance, image_meta, [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] vm_ref = self.build_virtual_machine(instance, [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.669170] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] for vif in network_info: [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self._sync_wrapper(fn, *args, **kwargs) [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.wait() [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self[:] = self._gt.wait() [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self._exit_event.wait() [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] result = hub.switch() [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 693.669668] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return self.greenlet.switch() [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] result = function(*args, **kwargs) [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] return func(*args, **kwargs) [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise e [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] nwinfo = self.network_api.allocate_for_instance( [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] created_port_ids = self._update_ports_for_instance( [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] with excutils.save_and_reraise_exception(): [ 693.670259] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] self.force_reraise() [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise self.value [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] updated_port = self._update_port( [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] _ensure_no_port_binding_failure(port) [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] raise exception.PortBindingFailed(port_id=port['id']) [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] nova.exception.PortBindingFailed: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. [ 693.670796] env[61648]: ERROR nova.compute.manager [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] [ 693.671261] env[61648]: DEBUG nova.compute.utils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 693.671261] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.547s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.672780] env[61648]: INFO nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.675992] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Build of instance e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22 was re-scheduled: Binding failed for port b728f6c3-5089-47a3-9f3c-190de17af12b, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 693.676571] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 693.677047] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquiring lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.677230] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Acquired lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.677399] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.866013] env[61648]: INFO nova.scheduler.client.report [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance 8c8a339c-e52a-4257-9191-4e03ecf87b22 [ 693.974591] env[61648]: INFO nova.compute.manager [-] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Took 1.04 seconds to deallocate network for instance. 
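The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in this log), which turns a failed Neutron port binding into the exception that triggers the reschedule. A hedged sketch of that check follows; the 'binding:vif_type' == 'binding_failed' test is an assumption about which Neutron port attribute is inspected, not a quote of the Nova source.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumed check: Neutron marks a failed binding by setting the port's
    # binding:vif_type to the sentinel value 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])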
[ 693.974905] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061848} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.977009] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 693.977774] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d59dd0-b145-400e-9e2e-1e41fe4d0e89 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.980771] env[61648]: DEBUG nova.compute.claims [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 693.980943] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.998719] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.999008] env[61648]: DEBUG oslo_concurrency.lockutils [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] Releasing lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.999156] env[61648]: DEBUG nova.compute.manager [req-32c2385a-810e-4d9f-8b35-f1724a020d80 req-8f4bd012-6d14-4b01-b98d-096bf5945cb9 service nova] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Received event network-vif-deleted-cac25edb-f1d6-467a-ac0a-cb9f4f5f442d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 693.999454] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25a7c13e-84ff-428f-9b8c-cb4181b895ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.018449] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 694.018449] env[61648]: value = "task-1336639" [ 694.018449] env[61648]: _type = "Task" [ 
694.018449] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.026636] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336639, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.204192] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.312240] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.377767] env[61648]: DEBUG oslo_concurrency.lockutils [None req-430cb721-118e-4284-a742-ba782dac7304 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "8c8a339c-e52a-4257-9191-4e03ecf87b22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.183s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.529272] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336639, 'name': ReconfigVM_Task, 'duration_secs': 0.256447} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.529630] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.530661] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5d5b0df-dfd7-41e3-a3ad-503b80d8792a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.537271] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 694.537271] env[61648]: value = "task-1336640" [ 694.537271] env[61648]: _type = "Task" [ 694.537271] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.545084] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336640, 'name': Rename_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.815395] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Releasing lock "refresh_cache-e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.815629] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 694.815833] env[61648]: DEBUG nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.816007] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.843088] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.880874] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 695.047496] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336640, 'name': Rename_Task, 'duration_secs': 0.129835} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.050502] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.051176] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a648e69-ae26-4e3f-a846-686ffc3eff50 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.058130] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 695.058130] env[61648]: value = "task-1336641" [ 695.058130] env[61648]: _type = "Task" [ 695.058130] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.067523] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.068854] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dfe4dc-1aad-4b13-9f76-75b97042d53a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.077172] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ab019e-cd08-42af-9bbd-aecc3b662dc2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.107960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624cbed2-6f32-4c21-958a-442b768aae32 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.117644] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329dbce7-0cfb-4db5-8b2a-269560970209 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.134637] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.352900] env[61648]: DEBUG nova.network.neutron [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.413283] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a 
tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.567906] env[61648]: DEBUG oslo_vmware.api [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336641, 'name': PowerOnVM_Task, 'duration_secs': 0.453366} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.568305] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 695.568513] env[61648]: DEBUG nova.compute.manager [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 695.569279] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f24058-1efb-45d6-b438-fea0100b6b54 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.639854] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.856440] env[61648]: INFO nova.compute.manager [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] [instance: e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22] Took 1.04 seconds to deallocate network for instance. 
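The repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all come from the same poll-until-done loop around vCenter tasks. The sketch below illustrates that loop only; it is not the oslo.vmware implementation, and poll_task() and the shape of its return value are assumptions.

import time

def wait_for_task(poll_task, task_id, interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task(task_id)              # assumed to return a dict
        if info['state'] == 'success':
            return info                        # caller logs duration_secs
        if info['state'] == 'error':
            raise RuntimeError('Task %s failed: %s'
                               % (task_id, info.get('error')))
        # 'running' or 'queued': the log reports progress and polls again
        time.sleep(interval)
    raise TimeoutError('Task %s did not complete within %.0fs'
                       % (task_id, timeout))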
[ 695.926393] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "af5e6f7b-7c21-44d1-a05c-0d34f59c0065" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.926637] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "af5e6f7b-7c21-44d1-a05c-0d34f59c0065" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.091209] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.147397] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.148573] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 696.150245] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.110s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.654248] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.655635] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.656047] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.720554] env[61648]: DEBUG nova.policy [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4551d6d29a19414eba41d37aac385891', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89e62ab1d08c4a8ead5a277b5ef234c5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 696.884981] env[61648]: INFO nova.scheduler.client.report [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Deleted allocations for instance e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22 [ 697.098020] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Successfully created port: 8daa647a-0fbf-497a-a216-da829bf525a9 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.139268] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcc2842-6e1c-49b8-beac-72d29e8e3a80 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.148125] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad63ca72-0485-470a-94cf-f30ee5eebf27 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.183507] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 697.186085] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d702948-c7fe-4576-afbf-bde90ac11471 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.192939] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875f221e-a5ad-4ad6-91e7-3e2f0f15f874 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.208164] env[61648]: DEBUG nova.compute.provider_tree [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.333498] env[61648]: INFO nova.compute.manager [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Rebuilding instance [ 697.380434] env[61648]: DEBUG nova.compute.manager [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 697.381221] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c918d467-c13b-491a-a5f0-d3d34a2376f6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.397479] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a927a66d-368e-4234-9ca6-aefc95c88d8e tempest-ServerRescueNegativeTestJSON-1218299072 tempest-ServerRescueNegativeTestJSON-1218299072-project-member] Lock "e5bf9ce1-f12d-4f82-b1b2-e6dfeaffea22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.141s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.710291] env[61648]: DEBUG nova.scheduler.client.report [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.890204] env[61648]: DEBUG nova.compute.manager [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Received event network-changed-8daa647a-0fbf-497a-a216-da829bf525a9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 697.890414] env[61648]: 
DEBUG nova.compute.manager [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Refreshing instance network info cache due to event network-changed-8daa647a-0fbf-497a-a216-da829bf525a9. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 697.890640] env[61648]: DEBUG oslo_concurrency.lockutils [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] Acquiring lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.890792] env[61648]: DEBUG oslo_concurrency.lockutils [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] Acquired lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.890984] env[61648]: DEBUG nova.network.neutron [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Refreshing network info cache for port 8daa647a-0fbf-497a-a216-da829bf525a9 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.893802] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 697.894406] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a9cc35b-b79c-4f1f-ad37-7c931d0e5844 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.902473] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 697.910919] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 697.910919] env[61648]: value = "task-1336642" [ 697.910919] env[61648]: _type = "Task" [ 697.910919] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.925224] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.163699] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. [ 698.163699] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.163699] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.163699] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.163699] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.163699] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.163699] env[61648]: ERROR nova.compute.manager raise self.value [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.163699] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 698.163699] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.163699] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 698.164168] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.164168] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 698.164168] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. 
[ 698.164168] env[61648]: ERROR nova.compute.manager [ 698.164168] env[61648]: Traceback (most recent call last): [ 698.164168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 698.164168] env[61648]: listener.cb(fileno) [ 698.164168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.164168] env[61648]: result = function(*args, **kwargs) [ 698.164168] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.164168] env[61648]: return func(*args, **kwargs) [ 698.164168] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.164168] env[61648]: raise e [ 698.164168] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.164168] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 698.164168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.164168] env[61648]: created_port_ids = self._update_ports_for_instance( [ 698.164168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.164168] env[61648]: with excutils.save_and_reraise_exception(): [ 698.164168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.164168] env[61648]: self.force_reraise() [ 698.164168] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.164168] env[61648]: raise self.value [ 698.164168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.164168] env[61648]: updated_port = self._update_port( [ 698.164168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.164168] env[61648]: _ensure_no_port_binding_failure(port) [ 698.164168] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.164168] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 698.164953] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. [ 698.164953] env[61648]: Removing descriptor: 16 [ 698.196162] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 698.215302] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.065s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.215991] env[61648]: ERROR nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Traceback (most recent call last): [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.driver.spawn(context, instance, image_meta, [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] vm_ref = self.build_virtual_machine(instance, [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 698.215991] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] for vif in network_info: [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self._sync_wrapper(fn, *args, **kwargs) [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.wait() [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/model.py", line 
635, in wait [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self[:] = self._gt.wait() [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self._exit_event.wait() [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] result = hub.switch() [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 698.216360] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return self.greenlet.switch() [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] result = function(*args, **kwargs) [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] return func(*args, **kwargs) [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise e [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] nwinfo = self.network_api.allocate_for_instance( [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] created_port_ids = self._update_ports_for_instance( [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] with excutils.save_and_reraise_exception(): [ 698.216700] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] self.force_reraise() [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise self.value [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] updated_port = self._update_port( [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] _ensure_no_port_binding_failure(port) [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] raise exception.PortBindingFailed(port_id=port['id']) [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] nova.exception.PortBindingFailed: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. [ 698.217056] env[61648]: ERROR nova.compute.manager [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] [ 698.217347] env[61648]: DEBUG nova.compute.utils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 698.223950] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Build of instance 33e270fd-0393-4425-8312-1e9fc91f3d1f was re-scheduled: Binding failed for port 96a8ab00-8cbd-4641-a7de-ddafd50bfe92, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 698.223950] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 698.223950] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquiring lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.223950] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Acquired lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.224262] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.231192] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.030s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.246113] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.246400] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.246579] 
env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.246794] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.246960] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.247186] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.247439] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.247621] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 698.247819] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.248034] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.248245] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.249372] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc97ead2-32d3-49b3-b674-a4d279845884 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.258543] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c8cc29dc-dcbf-4a31-b607-aac8133599dd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.276117] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Traceback (most recent call last): [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] yield resources [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.driver.spawn(context, instance, image_meta, [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] vm_ref = self.build_virtual_machine(instance, [ 698.276117] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] for vif in network_info: [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return self._sync_wrapper(fn, *args, **kwargs) [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.wait() [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] 
self[:] = self._gt.wait() [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return self._exit_event.wait() [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 698.276492] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] current.throw(*self._exc) [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] result = function(*args, **kwargs) [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return func(*args, **kwargs) [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise e [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] nwinfo = self.network_api.allocate_for_instance( [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] created_port_ids = self._update_ports_for_instance( [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] with excutils.save_and_reraise_exception(): [ 698.276859] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.force_reraise() [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise self.value [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: 
a9d3592b-56f7-4823-bf0c-8b92ac4587bb] updated_port = self._update_port( [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] _ensure_no_port_binding_failure(port) [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise exception.PortBindingFailed(port_id=port['id']) [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. [ 698.277217] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] [ 698.277217] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Terminating instance [ 698.278320] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.409469] env[61648]: DEBUG nova.network.neutron [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.436921] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336642, 'name': PowerOffVM_Task, 'duration_secs': 0.174135} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.437227] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.437445] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.440505] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b292fe1-f1fe-4f1e-a090-7289d7868344 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.452248] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.453321] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.453566] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62c0e122-b932-4611-9c85-1c0110828597 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.481251] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 698.481558] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 698.481708] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Deleting the datastore file [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 698.481974] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0b9fbdc-8b07-4bf6-8380-4c65d76f8fa1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.492041] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 
tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 698.492041] env[61648]: value = "task-1336644" [ 698.492041] env[61648]: _type = "Task" [ 698.492041] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.499443] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.521951] env[61648]: DEBUG nova.network.neutron [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.821202] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.905985] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.002731] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098646} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.002795] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.002954] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.004331] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.025029] env[61648]: DEBUG oslo_concurrency.lockutils [req-69355bff-8f23-48d1-acd1-0d7f62906ddd req-7cbfb7c0-712a-4ce2-a412-edac3f377f26 service nova] Releasing lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.027525] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.029297] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.172720] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45501b74-385d-4ac0-858a-2368e50f4222 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.180134] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb8d910-3334-4540-b8c8-223039b38521 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.213494] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9cd83d-0df2-4d51-ac8d-604d1690cae3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.225118] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fdecac-c989-4809-9e01-b8291eeec762 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.236422] env[61648]: DEBUG nova.compute.provider_tree [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed in ProviderTree 
for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.408976] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Releasing lock "refresh_cache-33e270fd-0393-4425-8312-1e9fc91f3d1f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.409104] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 699.409291] env[61648]: DEBUG nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.409430] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.430174] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.553941] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.630981] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.739692] env[61648]: DEBUG nova.scheduler.client.report [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.912871] env[61648]: DEBUG nova.compute.manager [req-3fb147d1-8f47-4f16-a943-02d72175e01f req-a319f7b2-e44d-4b8c-b9e9-dc75c8ad9d29 service nova] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Received event network-vif-deleted-8daa647a-0fbf-497a-a216-da829bf525a9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 699.932921] env[61648]: DEBUG nova.network.neutron [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.040357] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 700.040520] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 700.040642] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 
tempest-ServersAdmin275Test-564011388-project-admin] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.040829] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 700.041147] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.041236] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 700.041421] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 700.041587] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 700.041773] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 700.041950] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 700.042155] env[61648]: DEBUG nova.virt.hardware [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 700.043423] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbc5109-e5ab-4783-846d-21e9aca74aa4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.051834] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c72168-273b-4962-a6e5-7988e816da4e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.065087] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 
tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.070699] env[61648]: DEBUG oslo.service.loopingcall [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.070923] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.071136] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-697eef33-0f19-4c32-9d63-ffbbb7068875 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.087382] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.087382] env[61648]: value = "task-1336645" [ 700.087382] env[61648]: _type = "Task" [ 700.087382] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.096078] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336645, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.133993] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.134545] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 700.134760] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.135133] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf79499a-1dce-4745-9764-b922dd15161a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.143432] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7147ca-86d9-4b6d-8d7f-730f73f4305f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.164763] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9d3592b-56f7-4823-bf0c-8b92ac4587bb could not be found. [ 700.164987] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.165186] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 700.165431] env[61648]: DEBUG oslo.service.loopingcall [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.165654] env[61648]: DEBUG nova.compute.manager [-] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.165750] env[61648]: DEBUG nova.network.neutron [-] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.191521] env[61648]: DEBUG nova.network.neutron [-] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.245306] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.016s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.245770] env[61648]: ERROR nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Traceback (most recent call last): [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.driver.spawn(context, instance, image_meta, [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] vm_ref = self.build_virtual_machine(instance, [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 700.245770] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] for vif in network_info: [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self._sync_wrapper(fn, *args, **kwargs) [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.wait() [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 700.246172] env[61648]: ERROR nova.compute.manager 
[instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self[:] = self._gt.wait() [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self._exit_event.wait() [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] result = hub.switch() [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 700.246172] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return self.greenlet.switch() [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] result = function(*args, **kwargs) [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] return func(*args, **kwargs) [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise e [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] nwinfo = self.network_api.allocate_for_instance( [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] created_port_ids = self._update_ports_for_instance( [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] with excutils.save_and_reraise_exception(): [ 700.246539] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] self.force_reraise() [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise self.value [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] updated_port = self._update_port( [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] _ensure_no_port_binding_failure(port) [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] raise exception.PortBindingFailed(port_id=port['id']) [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] nova.exception.PortBindingFailed: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. [ 700.246862] env[61648]: ERROR nova.compute.manager [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] [ 700.247152] env[61648]: DEBUG nova.compute.utils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 700.249259] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.088s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.254390] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Build of instance be7776f1-0083-4c40-a7e6-477c0c65f7bf was re-scheduled: Binding failed for port 9342f0e9-5428-41eb-bd99-b28bcf9c0e45, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 700.254390] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 700.254390] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquiring lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.254390] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Acquired lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.254646] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.435754] env[61648]: INFO nova.compute.manager [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] [instance: 33e270fd-0393-4425-8312-1e9fc91f3d1f] Took 1.03 seconds to deallocate network for instance. [ 700.597770] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336645, 'name': CreateVM_Task, 'duration_secs': 0.243011} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.597770] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 700.597770] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.597770] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.597993] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 700.598194] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf88ec22-f943-4cda-9d12-afbd3d6dfe67 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.602849] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 700.602849] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52125a64-fdc0-1c2d-3fc9-fb4612c7c797" [ 700.602849] env[61648]: _type = "Task" [ 700.602849] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.609644] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52125a64-fdc0-1c2d-3fc9-fb4612c7c797, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.694247] env[61648]: DEBUG nova.network.neutron [-] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.792782] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.867271] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.116635] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52125a64-fdc0-1c2d-3fc9-fb4612c7c797, 'name': SearchDatastore_Task, 'duration_secs': 0.00854} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.119067] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.119307] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.119532] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.119676] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.119846] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.120292] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91ec8033-cb89-43cd-b82c-796535c88717 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.128347] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.128518] env[61648]: 
DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.129217] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e796fec-7f12-4dac-9643-822e67aa5323 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.136749] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 701.136749] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52ac7306-1248-aee0-83f8-48922b3f0cba" [ 701.136749] env[61648]: _type = "Task" [ 701.136749] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.143660] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52ac7306-1248-aee0-83f8-48922b3f0cba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.157696] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da68ec5f-3da5-4958-b77a-e22228b7aad1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.164327] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3b0bbf-9c84-4a21-9d4f-1adb71b7e5ee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.195084] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4d5700-4c89-4f2f-856b-fc9c2496521b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.196120] env[61648]: INFO nova.compute.manager [-] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Took 1.03 seconds to deallocate network for instance. 
[ 701.199928] env[61648]: DEBUG nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 701.200101] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.201504] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef310d3-e321-42bb-9885-d8b96a1fd1a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.213968] env[61648]: DEBUG nova.compute.provider_tree [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.372098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Releasing lock "refresh_cache-be7776f1-0083-4c40-a7e6-477c0c65f7bf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.372354] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 701.372516] env[61648]: DEBUG nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 701.372677] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 701.391076] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.471834] env[61648]: INFO nova.scheduler.client.report [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Deleted allocations for instance 33e270fd-0393-4425-8312-1e9fc91f3d1f [ 701.646178] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52ac7306-1248-aee0-83f8-48922b3f0cba, 'name': SearchDatastore_Task, 'duration_secs': 0.007732} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.646941] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa77a25-a6ab-4752-8b6e-c7fa66318601 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.652463] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 701.652463] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52fba108-a97d-05fb-bbc3-8593b34d9e98" [ 701.652463] env[61648]: _type = "Task" [ 701.652463] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.659201] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52fba108-a97d-05fb-bbc3-8593b34d9e98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.720388] env[61648]: DEBUG nova.scheduler.client.report [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.893154] env[61648]: DEBUG nova.network.neutron [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.991124] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbbb7af7-1019-4b24-aa50-1fe17683974c tempest-FloatingIPsAssociationNegativeTestJSON-72981925 tempest-FloatingIPsAssociationNegativeTestJSON-72981925-project-member] Lock "33e270fd-0393-4425-8312-1e9fc91f3d1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.505s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.163353] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52fba108-a97d-05fb-bbc3-8593b34d9e98, 'name': SearchDatastore_Task, 'duration_secs': 0.007908} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.163353] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.163353] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 702.163353] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbd599b3-1c99-4ff4-b2af-637ed11eac9f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.170012] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 702.170012] env[61648]: value = "task-1336646" [ 702.170012] env[61648]: _type = "Task" [ 702.170012] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.178311] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.227297] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.978s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.227297] env[61648]: ERROR nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. 
[ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] Traceback (most recent call last): [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.driver.spawn(context, instance, image_meta, [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.227297] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] vm_ref = self.build_virtual_machine(instance, [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] for vif in network_info: [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return self._sync_wrapper(fn, *args, **kwargs) [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.wait() [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self[:] = self._gt.wait() [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return self._exit_event.wait() [ 702.227839] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] current.throw(*self._exc) [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] result = function(*args, **kwargs) [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] return func(*args, **kwargs) [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise e [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] nwinfo = self.network_api.allocate_for_instance( [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] created_port_ids = self._update_ports_for_instance( [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 702.228225] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] with excutils.save_and_reraise_exception(): [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] self.force_reraise() [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise self.value [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] updated_port = self._update_port( [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] _ensure_no_port_binding_failure(port) [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] raise exception.PortBindingFailed(port_id=port['id']) [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] nova.exception.PortBindingFailed: Binding failed for 
port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. [ 702.228601] env[61648]: ERROR nova.compute.manager [instance: 9008460a-6b35-468d-803c-d10c139494f7] [ 702.228935] env[61648]: DEBUG nova.compute.utils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 702.231217] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.990s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.231217] env[61648]: INFO nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.234069] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Build of instance 9008460a-6b35-468d-803c-d10c139494f7 was re-scheduled: Binding failed for port 83dc9ed5-d8bf-41ea-a443-d86b41c7ee09, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 702.234069] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 702.234260] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquiring lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.234399] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Acquired lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.234547] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.396401] env[61648]: INFO nova.compute.manager [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] [instance: be7776f1-0083-4c40-a7e6-477c0c65f7bf] Took 1.02 seconds to deallocate network for instance. [ 702.492407] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 702.679577] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336646, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.760580] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.880988] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.018559] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.180860] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336646, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529131} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.181199] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 703.181405] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 703.181902] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5984148-f0e8-4f04-aa04-7624600878ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.188364] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 703.188364] env[61648]: value = "task-1336647" [ 703.188364] env[61648]: _type = "Task" [ 703.188364] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.196606] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336647, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.383037] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Releasing lock "refresh_cache-9008460a-6b35-468d-803c-d10c139494f7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.383302] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 703.383481] env[61648]: DEBUG nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.383646] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 703.403606] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.436519] env[61648]: INFO nova.scheduler.client.report [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Deleted allocations for instance be7776f1-0083-4c40-a7e6-477c0c65f7bf [ 703.650941] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622624e9-d3a9-4b95-b042-958f3f34bfda {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.661747] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb9f8b7-70a4-48bf-b224-8d145a3a77de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.702187] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe4f723-df1c-45fd-9614-89cffb74d30e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.709739] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336647, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067002} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.715020] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 703.715020] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ddd2c1-e416-494c-9075-ecf23c66bdbc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.717175] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278fde14-78be-4cd1-81bc-1d516956baa2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.743078] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.751752] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67748e78-cddd-4595-bc7b-0365a2e74688 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.766095] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.772939] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 703.772939] env[61648]: value = "task-1336648" [ 703.772939] env[61648]: _type = "Task" [ 703.772939] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.781163] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336648, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.907118] env[61648]: DEBUG nova.network.neutron [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.951494] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b083399-e362-491f-90fb-cafce4261f95 tempest-MigrationsAdminTest-286825448 tempest-MigrationsAdminTest-286825448-project-member] Lock "be7776f1-0083-4c40-a7e6-477c0c65f7bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.871s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.273220] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.293688] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336648, 'name': ReconfigVM_Task, 'duration_secs': 0.274058} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.293982] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569/55630bdb-fe38-49dc-baa2-2ac5de20e569.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 704.294600] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb9c6c97-535f-45a4-a57b-8615b0790f87 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.303224] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 704.303224] env[61648]: value = "task-1336649" [ 704.303224] env[61648]: _type = "Task" [ 704.303224] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.312144] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336649, 'name': Rename_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.409724] env[61648]: INFO nova.compute.manager [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] [instance: 9008460a-6b35-468d-803c-d10c139494f7] Took 1.03 seconds to deallocate network for instance. [ 704.453110] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 704.784777] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.784777] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 704.788934] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.942s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.790608] env[61648]: INFO nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.815431] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336649, 'name': Rename_Task, 'duration_secs': 0.130702} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.815753] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.816070] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-536bba4c-d059-4e96-a1ff-143447ab7332 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.822432] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Waiting for the task: (returnval){ [ 704.822432] env[61648]: value = "task-1336650" [ 704.822432] env[61648]: _type = "Task" [ 704.822432] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.833077] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336650, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.983120] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.296280] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.299992] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 705.300309] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.332907] env[61648]: DEBUG oslo_vmware.api [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Task: {'id': task-1336650, 'name': PowerOnVM_Task, 'duration_secs': 0.482901} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.333496] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.333754] env[61648]: DEBUG nova.compute.manager [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 705.334604] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783194b8-90e5-41ef-afb6-dad0b648f4ee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.365820] env[61648]: DEBUG nova.policy [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4551d6d29a19414eba41d37aac385891', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89e62ab1d08c4a8ead5a277b5ef234c5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.454995] env[61648]: INFO nova.scheduler.client.report [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Deleted allocations for instance 9008460a-6b35-468d-803c-d10c139494f7 [ 705.800575] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 705.858036] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.968788] env[61648]: DEBUG oslo_concurrency.lockutils [None req-22f06a1e-fa16-4a2c-a7a9-b9fe891ca6a4 tempest-FloatingIPsAssociationTestJSON-2002236079 tempest-FloatingIPsAssociationTestJSON-2002236079-project-member] Lock "9008460a-6b35-468d-803c-d10c139494f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.014s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.089485] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Successfully created port: a464193c-7358-4c31-8701-09510a31be57 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.196289] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b698dec6-768c-4eb8-82c5-a73f6136d797 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.208346] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2994d69d-317c-45d6-8d88-c018719ffc2e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.266015] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3570bf8-5822-40f0-b741-ebceb2e87752 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.276698] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30b0a7c-d1aa-4b8e-a042-1faaab694a31 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.299564] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.472431] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 706.806140] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.810713] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 706.850816] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.851060] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.851276] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.851406] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.851553] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 706.852067] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.852423] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.852655] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.853742] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.853742] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.853742] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.854120] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b76287f-ea6e-40f4-a8d2-b65beaf443c1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.865220] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a682307-289c-4141-b48b-39b9e521b53f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.917213] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquiring lock "c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.917213] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.000351] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.194119] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.194891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.194891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "55630bdb-fe38-49dc-baa2-2ac5de20e569-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.194891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.195060] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.202180] env[61648]: INFO nova.compute.manager [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Terminating instance [ 707.205635] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.205635] 
env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquired lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.205779] env[61648]: DEBUG nova.network.neutron [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.318443] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.318982] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 707.322193] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.196s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.574297] env[61648]: DEBUG nova.compute.manager [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Received event network-changed-a464193c-7358-4c31-8701-09510a31be57 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 707.574856] env[61648]: DEBUG nova.compute.manager [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Refreshing instance network info cache due to event network-changed-a464193c-7358-4c31-8701-09510a31be57. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 707.580404] env[61648]: DEBUG oslo_concurrency.lockutils [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] Acquiring lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.580404] env[61648]: DEBUG oslo_concurrency.lockutils [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] Acquired lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.580404] env[61648]: DEBUG nova.network.neutron [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Refreshing network info cache for port a464193c-7358-4c31-8701-09510a31be57 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.735145] env[61648]: DEBUG nova.network.neutron [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.819583] env[61648]: DEBUG nova.network.neutron [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.827531] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.831987] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.832172] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.925883] env[61648]: DEBUG nova.policy [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4551d6d29a19414eba41d37aac385891', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89e62ab1d08c4a8ead5a277b5ef234c5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 707.943621] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. [ 707.943621] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.943621] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.943621] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.943621] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.943621] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.943621] env[61648]: ERROR nova.compute.manager raise self.value [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.943621] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 707.943621] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.943621] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 707.944393] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.944393] env[61648]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 707.944393] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. [ 707.944393] env[61648]: ERROR nova.compute.manager [ 707.944393] env[61648]: Traceback (most recent call last): [ 707.944393] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 707.944393] env[61648]: listener.cb(fileno) [ 707.944393] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.944393] env[61648]: result = function(*args, **kwargs) [ 707.944393] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 707.944393] env[61648]: return func(*args, **kwargs) [ 707.944393] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 707.944393] env[61648]: raise e [ 707.944393] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.944393] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 707.944393] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.944393] env[61648]: created_port_ids = self._update_ports_for_instance( [ 707.944393] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.944393] env[61648]: with excutils.save_and_reraise_exception(): [ 707.944393] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.944393] env[61648]: self.force_reraise() [ 707.944393] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.944393] env[61648]: raise self.value [ 707.944393] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.944393] env[61648]: updated_port = self._update_port( [ 707.944393] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.944393] env[61648]: _ensure_no_port_binding_failure(port) [ 707.944393] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.944393] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 707.945765] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. [ 707.945765] env[61648]: Removing descriptor: 16 [ 707.945765] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. 
[ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] Traceback (most recent call last): [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] yield resources [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.driver.spawn(context, instance, image_meta, [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self._vmops.spawn(context, instance, image_meta, injected_files, [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 707.945765] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] vm_ref = self.build_virtual_machine(instance, [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] vif_infos = vmwarevif.get_vif_info(self._session, [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] for vif in network_info: [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self._sync_wrapper(fn, *args, **kwargs) [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.wait() [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self[:] = self._gt.wait() [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self._exit_event.wait() [ 707.946293] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 707.946698] env[61648]: ERROR 
nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] result = hub.switch() [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self.greenlet.switch() [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] result = function(*args, **kwargs) [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return func(*args, **kwargs) [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise e [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] nwinfo = self.network_api.allocate_for_instance( [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.946698] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] created_port_ids = self._update_ports_for_instance( [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] with excutils.save_and_reraise_exception(): [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.force_reraise() [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise self.value [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] updated_port = self._update_port( [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.947162] 
env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] _ensure_no_port_binding_failure(port) [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.947162] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise exception.PortBindingFailed(port_id=port['id']) [ 707.947473] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. [ 707.947473] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] [ 707.947473] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Terminating instance [ 707.947473] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.111102] env[61648]: DEBUG nova.network.neutron [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.234708] env[61648]: DEBUG nova.network.neutron [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.264136] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07183cdd-ca4f-477d-8a8b-2960f44c9614 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.272563] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8316ec3f-08d1-4550-aa97-e72fb90a6268 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.308021] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8ce858-de91-4ad9-9ba6-1d61e9e8a895 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.315892] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c6dde8-3976-4370-9520-4c5df988b65e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.321601] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Releasing lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.322073] env[61648]: DEBUG nova.compute.manager [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 708.322274] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.323110] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5cc866-ae24-4140-b016-9994df3b8374 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.334382] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 708.337242] env[61648]: DEBUG nova.compute.provider_tree [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.342615] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 708.343046] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac562b10-845b-43ef-94ab-f4916787eeb1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.349210] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 708.349210] env[61648]: value = "task-1336651" [ 708.349210] env[61648]: _type = "Task" [ 708.349210] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.358959] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336651, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.539847] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Successfully created port: fe518c10-5f4a-4f0d-953b-14595a2cca7d {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.738460] env[61648]: DEBUG oslo_concurrency.lockutils [req-499bbab0-b3d0-4749-a5a3-bcee98b008ff req-5106ca1a-cdba-4a02-b33d-958392769af6 service nova] Releasing lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.738882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.739071] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 708.847024] env[61648]: DEBUG nova.scheduler.client.report [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.863389] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336651, 'name': PowerOffVM_Task, 'duration_secs': 0.116229} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.866039] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 708.866039] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 708.866039] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c615212-8ddd-4fc1-a7ed-9336e70f4711 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.890892] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 708.890892] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 708.890892] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Deleting the datastore file [datastore2] 55630bdb-fe38-49dc-baa2-2ac5de20e569 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 708.891097] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-479b27a7-928d-4f87-8dd8-c8babb6d873c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.898318] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for the task: (returnval){ [ 708.898318] env[61648]: value = "task-1336653" [ 708.898318] env[61648]: _type = "Task" [ 708.898318] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.905478] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.321474] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.343602] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquiring lock "9cc301f6-45de-43b9-a88d-d94e3f00cff3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.343887] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "9cc301f6-45de-43b9-a88d-d94e3f00cff3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.353047] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 709.358549] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.036s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.359135] env[61648]: ERROR nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. 
[ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Traceback (most recent call last): [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.driver.spawn(context, instance, image_meta, [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] vm_ref = self.build_virtual_machine(instance, [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.359135] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] for vif in network_info: [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return self._sync_wrapper(fn, *args, **kwargs) [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.wait() [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self[:] = self._gt.wait() [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return self._exit_event.wait() [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] current.throw(*self._exc) [ 709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
709.359505] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] result = function(*args, **kwargs) [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] return func(*args, **kwargs) [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise e [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] nwinfo = self.network_api.allocate_for_instance( [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] created_port_ids = self._update_ports_for_instance( [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] with excutils.save_and_reraise_exception(): [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] self.force_reraise() [ 709.359843] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise self.value [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] updated_port = self._update_port( [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] _ensure_no_port_binding_failure(port) [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] raise exception.PortBindingFailed(port_id=port['id']) [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] nova.exception.PortBindingFailed: Binding failed for 
port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. [ 709.360206] env[61648]: ERROR nova.compute.manager [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] [ 709.360206] env[61648]: DEBUG nova.compute.utils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 709.360960] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.380s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.368610] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Build of instance 458302d3-123c-47e8-bee8-6fe1462d5f4b was re-scheduled: Binding failed for port b1324f39-8021-4a1e-8858-95e8d192d6b3, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 709.371604] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 709.371604] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.371604] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquired lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.371604] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.396438] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.396701] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.397046] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.397269] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.397409] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.397548] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.397744] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.397961] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.398221] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 709.398390] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.398553] env[61648]: DEBUG nova.virt.hardware [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.399712] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd760ca4-5f77-4bd8-9297-bd543a36ae0e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.413754] env[61648]: DEBUG oslo_vmware.api [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Task: {'id': task-1336653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100928} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.414970] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f480d4-af69-43dd-a648-00a4b5916ded {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.418909] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 709.420156] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 709.420156] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 709.420156] env[61648]: INFO nova.compute.manager [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Took 1.10 seconds to destroy the instance on the hypervisor. [ 709.420156] env[61648]: DEBUG oslo.service.loopingcall [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 709.420998] env[61648]: DEBUG nova.compute.manager [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 709.420998] env[61648]: DEBUG nova.network.neutron [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 709.441927] env[61648]: DEBUG nova.network.neutron [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.486740] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.551120] env[61648]: DEBUG nova.compute.manager [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Received event network-changed-fe518c10-5f4a-4f0d-953b-14595a2cca7d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.551120] env[61648]: DEBUG nova.compute.manager [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Refreshing instance network info cache due to event network-changed-fe518c10-5f4a-4f0d-953b-14595a2cca7d. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 709.551331] env[61648]: DEBUG oslo_concurrency.lockutils [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] Acquiring lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.551474] env[61648]: DEBUG oslo_concurrency.lockutils [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] Acquired lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.551628] env[61648]: DEBUG nova.network.neutron [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Refreshing network info cache for port fe518c10-5f4a-4f0d-953b-14595a2cca7d {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.612320] env[61648]: DEBUG nova.compute.manager [req-df4fc484-2787-476e-8f4e-ed51171109e4 req-9b31cfa7-0eea-411b-8e46-2ed49d988d56 service nova] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Received event network-vif-deleted-a464193c-7358-4c31-8701-09510a31be57 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.830202] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. 
[ 709.830202] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.830202] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.830202] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.830202] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.830202] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.830202] env[61648]: ERROR nova.compute.manager raise self.value [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.830202] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 709.830202] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.830202] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 709.831032] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.831032] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 709.831032] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. 
[ 709.831032] env[61648]: ERROR nova.compute.manager [ 709.831032] env[61648]: Traceback (most recent call last): [ 709.831032] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 709.831032] env[61648]: listener.cb(fileno) [ 709.831032] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.831032] env[61648]: result = function(*args, **kwargs) [ 709.831032] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.831032] env[61648]: return func(*args, **kwargs) [ 709.831032] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.831032] env[61648]: raise e [ 709.831032] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.831032] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 709.831032] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.831032] env[61648]: created_port_ids = self._update_ports_for_instance( [ 709.831032] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.831032] env[61648]: with excutils.save_and_reraise_exception(): [ 709.831032] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.831032] env[61648]: self.force_reraise() [ 709.831032] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.831032] env[61648]: raise self.value [ 709.831032] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.831032] env[61648]: updated_port = self._update_port( [ 709.831032] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.831032] env[61648]: _ensure_no_port_binding_failure(port) [ 709.831032] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.831032] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 709.831909] env[61648]: nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. [ 709.831909] env[61648]: Removing descriptor: 19 [ 709.832509] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. 
[ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Traceback (most recent call last): [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] yield resources [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.driver.spawn(context, instance, image_meta, [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] vm_ref = self.build_virtual_machine(instance, [ 709.832509] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] for vif in network_info: [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self._sync_wrapper(fn, *args, **kwargs) [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.wait() [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self[:] = self._gt.wait() [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self._exit_event.wait() [ 709.832999] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 709.832999] env[61648]: ERROR 
nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] result = hub.switch() [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self.greenlet.switch() [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] result = function(*args, **kwargs) [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return func(*args, **kwargs) [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise e [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] nwinfo = self.network_api.allocate_for_instance( [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] created_port_ids = self._update_ports_for_instance( [ 709.833720] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] with excutils.save_and_reraise_exception(): [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.force_reraise() [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise self.value [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] updated_port = self._update_port( [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.834156] 
env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] _ensure_no_port_binding_failure(port) [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise exception.PortBindingFailed(port_id=port['id']) [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. [ 709.834156] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] [ 709.840034] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Terminating instance [ 709.842675] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.895252] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.943889] env[61648]: DEBUG nova.network.neutron [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.983431] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.989421] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.990540] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 709.990540] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 709.990540] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24601ab6-f027-4b8b-a8c2-5f322b45bfcb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.005454] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c123487d-fec9-4d42-85a2-a2b36349425e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.032614] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b07648a0-23a5-4dee-9582-ce393292b768 could not be found. [ 710.032856] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.033040] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Took 0.04 seconds to destroy the instance on the hypervisor. [ 710.033281] env[61648]: DEBUG oslo.service.loopingcall [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.035781] env[61648]: DEBUG nova.compute.manager [-] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.035869] env[61648]: DEBUG nova.network.neutron [-] [instance: b07648a0-23a5-4dee-9582-ce393292b768] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.058522] env[61648]: DEBUG nova.network.neutron [-] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.079374] env[61648]: DEBUG nova.network.neutron [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.186643] env[61648]: DEBUG nova.network.neutron [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.314579] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3805e6d-92cb-407c-a8ad-ced768a13791 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.324983] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed99a009-ac2f-4a8e-94e3-5f479c2d8c34 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.358808] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4331da-a8de-42e8-90d8-2dcaae41345e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.365190] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384ee7dd-1b3d-4801-8038-4184c54686e9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.381080] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.446556] env[61648]: INFO nova.compute.manager [-] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Took 1.03 seconds to deallocate network for instance. [ 710.485766] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Releasing lock "refresh_cache-458302d3-123c-47e8-bee8-6fe1462d5f4b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.486245] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 710.486370] env[61648]: DEBUG nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.486491] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.501952] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.560445] env[61648]: DEBUG nova.network.neutron [-] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.689163] env[61648]: DEBUG oslo_concurrency.lockutils [req-d25d67c3-7053-4b1f-bce0-d520d6a2e0cf req-b1ec13ba-64bb-432b-9ed0-7c778e53be4b service nova] Releasing lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.689658] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.689845] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.902405] env[61648]: ERROR nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [req-4a87aea7-1ba1-4455-8404-e150cb1192db] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4a87aea7-1ba1-4455-8404-e150cb1192db"}]}: nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. [ 710.920718] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 710.939878] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 710.940183] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.952304] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 710.954761] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.980496] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 711.004626] env[61648]: DEBUG nova.network.neutron [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.065581] env[61648]: INFO nova.compute.manager [-] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Took 1.03 seconds to deallocate network for instance. [ 711.068126] env[61648]: DEBUG nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 711.068311] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.215961] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.310519] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.379103] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f12f7bd-8a4f-4d92-b759-28516862a2d8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.389785] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fb8bd7-cd95-4b5b-8d3c-76150d69348b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.424997] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69facc85-366d-45df-a6f7-de77e81a3170 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.432730] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4baf57-b49b-4475-8151-5eaa870f76da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.448075] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 
with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.508868] env[61648]: INFO nova.compute.manager [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 458302d3-123c-47e8-bee8-6fe1462d5f4b] Took 1.02 seconds to deallocate network for instance. [ 711.620878] env[61648]: DEBUG nova.compute.manager [req-98c782b0-e2a3-4fd9-ad3d-2b0d91582567 req-ee33b850-b217-4e0d-bba3-661d0a10877e service nova] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Received event network-vif-deleted-fe518c10-5f4a-4f0d-953b-14595a2cca7d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.816673] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.817345] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 711.817622] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 711.817914] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84261931-2a95-4ebd-8d84-c054f1a74308 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.828926] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eced84-a562-49be-a0b2-a85c7286c728 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.854268] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9d5a8b8-afc2-40dc-b480-0b946e085e18 could not be found. 
[ 711.854682] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.854930] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Took 0.04 seconds to destroy the instance on the hypervisor. [ 711.855205] env[61648]: DEBUG oslo.service.loopingcall [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.855433] env[61648]: DEBUG nova.compute.manager [-] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 711.855530] env[61648]: DEBUG nova.network.neutron [-] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.874352] env[61648]: DEBUG nova.network.neutron [-] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.987010] env[61648]: DEBUG nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 711.987300] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 76 to 77 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 711.987572] env[61648]: DEBUG nova.compute.provider_tree [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 712.377084] env[61648]: DEBUG nova.network.neutron [-] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.497032] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.134s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.497032] env[61648]: ERROR nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Traceback (most recent call last): [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.driver.spawn(context, instance, image_meta, [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.497032] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] vm_ref = self.build_virtual_machine(instance, [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] for vif in network_info: [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return self._sync_wrapper(fn, *args, **kwargs) [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 603, in 
_sync_wrapper [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.wait() [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self[:] = self._gt.wait() [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return self._exit_event.wait() [ 712.497524] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] current.throw(*self._exc) [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] result = function(*args, **kwargs) [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] return func(*args, **kwargs) [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise e [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] nwinfo = self.network_api.allocate_for_instance( [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] created_port_ids = self._update_ports_for_instance( [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.497868] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] with excutils.save_and_reraise_exception(): [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] self.force_reraise() [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, 
in force_reraise [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise self.value [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] updated_port = self._update_port( [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] _ensure_no_port_binding_failure(port) [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] raise exception.PortBindingFailed(port_id=port['id']) [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] nova.exception.PortBindingFailed: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. [ 712.498224] env[61648]: ERROR nova.compute.manager [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] [ 712.498642] env[61648]: DEBUG nova.compute.utils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 712.504016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.088s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.504016] env[61648]: INFO nova.compute.claims [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.508421] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Build of instance 46672a70-7d6b-4a86-833b-a7583c71e595 was re-scheduled: Binding failed for port cac25edb-f1d6-467a-ac0a-cb9f4f5f442d, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 712.509364] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 712.509725] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquiring lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.510219] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Acquired lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.510530] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.569017] env[61648]: INFO nova.scheduler.client.report [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Deleted allocations for instance 458302d3-123c-47e8-bee8-6fe1462d5f4b [ 712.880927] env[61648]: INFO nova.compute.manager [-] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Took 1.03 seconds to deallocate network for instance. [ 712.884049] env[61648]: DEBUG nova.compute.claims [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.884311] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.044830] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.075273] env[61648]: DEBUG oslo_concurrency.lockutils [None req-cca8389f-79ec-4e07-84d3-782423817f25 tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "458302d3-123c-47e8-bee8-6fe1462d5f4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.316s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.203691] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.578476] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 713.706492] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Releasing lock "refresh_cache-46672a70-7d6b-4a86-833b-a7583c71e595" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.706913] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 713.706995] env[61648]: DEBUG nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.707116] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.735070] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.951255] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17d2616-df7a-498c-900b-d7fc420b8745 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.957396] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dd0118-e33a-45a6-b245-49e4110f0c39 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.988868] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43380b52-ed29-4f84-9e96-2f59ceb83872 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.000154] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecac5f43-8841-4d47-9ed9-3598dfcf207e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.017693] env[61648]: DEBUG nova.compute.provider_tree [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.104700] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.243737] env[61648]: DEBUG nova.network.neutron [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.522672] env[61648]: DEBUG nova.scheduler.client.report [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 714.660987] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquiring lock "acbab424-c325-4e57-81a2-3d4a1ae4a081" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.661278] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "acbab424-c325-4e57-81a2-3d4a1ae4a081" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.747581] env[61648]: INFO nova.compute.manager [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] [instance: 46672a70-7d6b-4a86-833b-a7583c71e595] Took 1.04 seconds to deallocate network for instance. [ 715.026044] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.026683] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 715.030120] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.939s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.030327] env[61648]: DEBUG nova.objects.instance [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61648) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 715.531813] env[61648]: DEBUG nova.compute.utils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.533277] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 715.533448] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 715.583249] env[61648]: DEBUG nova.policy [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5684d434eff3460684d26f9170d7f8c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '885858634ee54e4083fa205ea1f8d506', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 715.783503] env[61648]: INFO nova.scheduler.client.report [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Deleted allocations for instance 46672a70-7d6b-4a86-833b-a7583c71e595 [ 715.892791] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Successfully created port: 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.039312] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 716.043481] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6429709b-a115-41e0-bc00-32922718b55a tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.044427] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.591s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.045836] env[61648]: INFO nova.compute.claims [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.294029] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3cd9402f-fb79-4d3b-9090-22d5a5202cee tempest-ServersAdminTestJSON-327669079 tempest-ServersAdminTestJSON-327669079-project-member] Lock "46672a70-7d6b-4a86-833b-a7583c71e595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.028s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.767718] env[61648]: DEBUG nova.compute.manager [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Received event network-changed-2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.767868] env[61648]: DEBUG nova.compute.manager [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Refreshing instance network info cache due to event network-changed-2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 716.768086] env[61648]: DEBUG oslo_concurrency.lockutils [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] Acquiring lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.768230] env[61648]: DEBUG oslo_concurrency.lockutils [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] Acquired lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.768390] env[61648]: DEBUG nova.network.neutron [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Refreshing network info cache for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.796848] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 716.999430] env[61648]: ERROR nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. 
[ 716.999430] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 716.999430] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.999430] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.999430] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.999430] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.999430] env[61648]: ERROR nova.compute.manager raise self.value [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.999430] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 716.999430] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.999430] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 716.999986] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.999986] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 716.999986] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. 
[ 716.999986] env[61648]: ERROR nova.compute.manager [ 716.999986] env[61648]: Traceback (most recent call last): [ 716.999986] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 716.999986] env[61648]: listener.cb(fileno) [ 716.999986] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.999986] env[61648]: result = function(*args, **kwargs) [ 716.999986] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.999986] env[61648]: return func(*args, **kwargs) [ 716.999986] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 716.999986] env[61648]: raise e [ 716.999986] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 716.999986] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 716.999986] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.999986] env[61648]: created_port_ids = self._update_ports_for_instance( [ 716.999986] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.999986] env[61648]: with excutils.save_and_reraise_exception(): [ 716.999986] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.999986] env[61648]: self.force_reraise() [ 716.999986] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.999986] env[61648]: raise self.value [ 716.999986] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.999986] env[61648]: updated_port = self._update_port( [ 716.999986] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.999986] env[61648]: _ensure_no_port_binding_failure(port) [ 716.999986] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.999986] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 717.000883] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. [ 717.000883] env[61648]: Removing descriptor: 16 [ 717.064108] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 717.117815] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.118385] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.118385] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.118589] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.118703] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.118832] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.119253] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.119412] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.119513] 
env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.119702] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.120035] env[61648]: DEBUG nova.virt.hardware [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.120705] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b52ad41-ff75-4106-a2f9-c8703590f571 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.131321] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1955ee-2eb3-4756-83f3-bdac46f61f33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.146480] env[61648]: ERROR nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. 
[ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Traceback (most recent call last): [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] yield resources [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.driver.spawn(context, instance, image_meta, [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] vm_ref = self.build_virtual_machine(instance, [ 717.146480] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] for vif in network_info: [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return self._sync_wrapper(fn, *args, **kwargs) [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.wait() [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self[:] = self._gt.wait() [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return self._exit_event.wait() [ 717.146865] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.146865] env[61648]: ERROR 
nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] current.throw(*self._exc) [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] result = function(*args, **kwargs) [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return func(*args, **kwargs) [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise e [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] nwinfo = self.network_api.allocate_for_instance( [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] created_port_ids = self._update_ports_for_instance( [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] with excutils.save_and_reraise_exception(): [ 717.147223] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.force_reraise() [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise self.value [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] updated_port = self._update_port( [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] _ensure_no_port_binding_failure(port) [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise exception.PortBindingFailed(port_id=port['id']) [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. [ 717.147667] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] [ 717.147667] env[61648]: INFO nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Terminating instance [ 717.150741] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquiring lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.305898] env[61648]: DEBUG nova.network.neutron [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.335884] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.480025] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398ec62b-afb7-42fb-a6da-64eb19445332 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.486021] env[61648]: DEBUG nova.network.neutron [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.490422] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacfa621-2ca3-4083-b207-5726de73fc0b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.519958] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490bd46f-5a20-4956-9fc8-573c4fb17fc9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.527838] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0121031c-bb21-4fde-b0e9-cf16d31a19ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.543407] env[61648]: DEBUG nova.compute.provider_tree [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Inventory has not 
changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.989017] env[61648]: DEBUG oslo_concurrency.lockutils [req-680f7fec-a3f5-4b3f-9a52-035870d4f085 req-55b32a7d-0255-4b3a-8dff-45bb74ce8251 service nova] Releasing lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.989518] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquired lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.989727] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.047914] env[61648]: DEBUG nova.scheduler.client.report [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 718.510839] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.553619] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.554401] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 718.560171] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.358s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.792604] env[61648]: DEBUG nova.compute.manager [req-7c1a04bf-5466-43cb-ac74-8c56da6e8900 req-41351a20-8fc1-4dfe-a690-91469800cf0b service nova] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Received event network-vif-deleted-2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 718.821682] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.070325] env[61648]: DEBUG nova.compute.utils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 719.075161] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 719.075161] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.105259] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.112019] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.166225] env[61648]: DEBUG nova.policy [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2c8bf8a6b824f029a58eb147a0ee568', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '797fb910b96349ba9b2b7b88b3b7958c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 719.324220] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Releasing lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.324639] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 719.324829] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.325536] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16fa1f16-4f9d-478f-ad55-3e8c4998dd52 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.335188] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448b1d05-6447-4405-b99c-092eaa4a5e87 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.359312] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1a8b990-f4b7-4049-9345-562d1b5c180e could not be found. [ 719.359312] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.359312] env[61648]: INFO nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 719.359312] env[61648]: DEBUG oslo.service.loopingcall [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.361689] env[61648]: DEBUG nova.compute.manager [-] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.361796] env[61648]: DEBUG nova.network.neutron [-] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.391792] env[61648]: DEBUG nova.network.neutron [-] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.460312] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c928274-656f-4f7c-88b2-0b7e35d7c330 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.466248] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3085d50f-777b-451c-896f-ee6e2f5a89da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.499894] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f57bd22-388e-4bd7-8f2b-3c8c83f97537 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.509019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e383b4c-964f-4d9e-a4ae-85d3c0ad1980 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.520314] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.575425] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 719.616424] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.616774] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 719.621822] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 719.690200] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Successfully created port: d03e3d83-a6c1-4354-b3ca-0a3681a209b4 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.892680] env[61648]: DEBUG nova.network.neutron [-] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.025921] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.122643] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.122643] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.122643] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.122643] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Skipping network cache update for instance because it is Building. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.150022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.150022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquired lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.150022] env[61648]: DEBUG nova.network.neutron [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Forcefully refreshing network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 720.150022] env[61648]: DEBUG nova.objects.instance [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lazy-loading 'info_cache' on Instance uuid 55630bdb-fe38-49dc-baa2-2ac5de20e569 {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.395640] env[61648]: INFO nova.compute.manager [-] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Took 1.03 seconds to deallocate network for instance. [ 720.399246] env[61648]: DEBUG nova.compute.claims [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 720.399625] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.532147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.974s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.532783] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. 
[ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Traceback (most recent call last): [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.driver.spawn(context, instance, image_meta, [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] vm_ref = self.build_virtual_machine(instance, [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 720.532783] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] for vif in network_info: [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return self._sync_wrapper(fn, *args, **kwargs) [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.wait() [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self[:] = self._gt.wait() [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return self._exit_event.wait() [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] current.throw(*self._exc) [ 720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
720.533179] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] result = function(*args, **kwargs) [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] return func(*args, **kwargs) [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise e [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] nwinfo = self.network_api.allocate_for_instance( [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] created_port_ids = self._update_ports_for_instance( [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] with excutils.save_and_reraise_exception(): [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] self.force_reraise() [ 720.533558] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise self.value [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] updated_port = self._update_port( [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] _ensure_no_port_binding_failure(port) [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] raise exception.PortBindingFailed(port_id=port['id']) [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] nova.exception.PortBindingFailed: Binding failed for 
port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. [ 720.533944] env[61648]: ERROR nova.compute.manager [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] [ 720.533944] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 720.535097] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.517s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.536613] env[61648]: INFO nova.compute.claims [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.543827] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Build of instance a9d3592b-56f7-4823-bf0c-8b92ac4587bb was re-scheduled: Binding failed for port 8daa647a-0fbf-497a-a216-da829bf525a9, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 720.545080] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 720.545080] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.545080] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.545080] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.585336] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 720.618722] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.618722] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.618722] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.618930] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.618930] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.618930] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.618930] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.618930] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.619095] env[61648]: DEBUG nova.virt.hardware [None 
req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.619128] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.619286] env[61648]: DEBUG nova.virt.hardware [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.620161] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99349e12-857a-4a02-a74d-3d9f7677f1da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.628663] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78117fa3-b85b-4b9a-aa0a-4569ad26106a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.857632] env[61648]: DEBUG nova.compute.manager [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Received event network-changed-d03e3d83-a6c1-4354-b3ca-0a3681a209b4 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 720.857865] env[61648]: DEBUG nova.compute.manager [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Refreshing instance network info cache due to event network-changed-d03e3d83-a6c1-4354-b3ca-0a3681a209b4. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 720.858716] env[61648]: DEBUG oslo_concurrency.lockutils [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] Acquiring lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.858897] env[61648]: DEBUG oslo_concurrency.lockutils [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] Acquired lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.859078] env[61648]: DEBUG nova.network.neutron [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Refreshing network info cache for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 720.911010] env[61648]: ERROR nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. [ 720.911010] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.911010] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.911010] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.911010] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.911010] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.911010] env[61648]: ERROR nova.compute.manager raise self.value [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.911010] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 720.911010] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.911010] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 720.912207] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.912207] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 720.912207] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. [ 720.912207] env[61648]: ERROR nova.compute.manager [ 720.912207] env[61648]: Traceback (most recent call last): [ 720.912207] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 720.912207] env[61648]: listener.cb(fileno) [ 720.912207] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.912207] env[61648]: result = function(*args, **kwargs) [ 720.912207] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.912207] env[61648]: return func(*args, **kwargs) [ 720.912207] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.912207] env[61648]: raise e [ 720.912207] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.912207] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 720.912207] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.912207] env[61648]: created_port_ids = self._update_ports_for_instance( [ 720.912207] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.912207] env[61648]: with excutils.save_and_reraise_exception(): [ 720.912207] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.912207] env[61648]: self.force_reraise() [ 720.912207] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.912207] env[61648]: raise self.value [ 720.912207] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.912207] env[61648]: updated_port = self._update_port( [ 720.912207] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.912207] env[61648]: _ensure_no_port_binding_failure(port) [ 720.912207] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.912207] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 720.913090] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. [ 720.913090] env[61648]: Removing descriptor: 16 [ 720.913090] env[61648]: ERROR nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. 
[ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Traceback (most recent call last): [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] yield resources [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.driver.spawn(context, instance, image_meta, [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 720.913090] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] vm_ref = self.build_virtual_machine(instance, [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] vif_infos = vmwarevif.get_vif_info(self._session, [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] for vif in network_info: [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self._sync_wrapper(fn, *args, **kwargs) [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.wait() [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self[:] = self._gt.wait() [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self._exit_event.wait() [ 720.913477] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 720.913884] env[61648]: ERROR 
nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] result = hub.switch() [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self.greenlet.switch() [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] result = function(*args, **kwargs) [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return func(*args, **kwargs) [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise e [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] nwinfo = self.network_api.allocate_for_instance( [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.913884] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] created_port_ids = self._update_ports_for_instance( [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] with excutils.save_and_reraise_exception(): [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.force_reraise() [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise self.value [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] updated_port = self._update_port( [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.914324] 
env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] _ensure_no_port_binding_failure(port) [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.914324] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise exception.PortBindingFailed(port_id=port['id']) [ 720.914703] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. [ 720.914703] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] [ 720.914703] env[61648]: INFO nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Terminating instance [ 720.916916] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquiring lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.078316] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.181146] env[61648]: DEBUG nova.network.neutron [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.230109] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.386783] env[61648]: DEBUG nova.network.neutron [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.468922] env[61648]: DEBUG nova.network.neutron [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.733498] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-a9d3592b-56f7-4823-bf0c-8b92ac4587bb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.733723] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 721.733894] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.734540] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.750401] env[61648]: DEBUG nova.network.neutron [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.753664] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.930562] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0462c5-a2ae-4c06-a08f-3dc6918e2df6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.939297] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb993f5-810d-46a6-99f4-9c2f2c7ac962 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.970666] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b288f9-f4da-43e5-be89-c63c31846f37 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.973960] env[61648]: DEBUG oslo_concurrency.lockutils [req-73bb57ff-37ce-4437-a391-3f5fd97f0f17 req-c9b624cf-75b8-452a-b324-76e0f433b81e service nova] Releasing lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.973960] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquired lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.974126] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.983247] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f3d540-caf9-4e81-a2ab-7f80327c46ca {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.994977] env[61648]: DEBUG nova.compute.provider_tree [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.254302] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Releasing lock "refresh_cache-55630bdb-fe38-49dc-baa2-2ac5de20e569" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.254564] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Updated the network info_cache for instance {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 722.254762] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.255113] env[61648]: DEBUG 
nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.256190] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.256381] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.256541] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.258399] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.258625] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.258820] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 722.259036] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.492289] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.499078] env[61648]: DEBUG nova.scheduler.client.report [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.602877] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.762094] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: a9d3592b-56f7-4823-bf0c-8b92ac4587bb] Took 1.03 seconds to deallocate network for instance. [ 722.762847] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.961922] env[61648]: DEBUG nova.compute.manager [req-0f023086-6016-4918-b787-e084abccd808 req-cdff33c3-3add-4e15-9316-38e69b6b2efe service nova] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Received event network-vif-deleted-d03e3d83-a6c1-4354-b3ca-0a3681a209b4 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.010321] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.010321] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 723.017076] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.033s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.017989] env[61648]: INFO nova.compute.claims [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.104500] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Releasing lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.104931] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 723.105125] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.105694] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5dbeb96-0806-4660-a31f-bc0b2bf98c6c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.114202] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f70029-1e36-4424-bcc6-de1ff8c5e2fd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.134922] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a978aa73-3f2a-4a87-bda3-bcde3028a646 could not be found. [ 723.135162] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.135344] env[61648]: INFO nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Took 0.03 seconds to destroy the instance on the hypervisor. 
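The repeated oslo_concurrency.lockutils DEBUG lines above ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs", "released ... :: held N.NNNs") come from the synchronized decorator's inner wrapper. A minimal, self-contained sketch of that pattern, assuming only that oslo.concurrency is installed and debug logging is enabled; the lock name "compute_resources" is taken from the log, everything else is illustrative:

    import logging
    from oslo_concurrency import lockutils

    # Surface the lockutils DEBUG messages (acquire/wait/held timings).
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized("compute_resources")
    def claim_resources():
        # Critical section: concurrent callers serialize on the named lock,
        # which is what produces the "waited"/"held" durations in the log.
        return "claimed"

    if __name__ == "__main__":
        claim_resources()

Running this prints the same acquire/release bookkeeping seen throughout the trace, with the wait time reflecting how long the caller queued behind other holders of the same named lock.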
[ 723.135582] env[61648]: DEBUG oslo.service.loopingcall [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.135789] env[61648]: DEBUG nova.compute.manager [-] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.135885] env[61648]: DEBUG nova.network.neutron [-] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.151669] env[61648]: DEBUG nova.network.neutron [-] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.525019] env[61648]: DEBUG nova.compute.utils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 723.530132] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 723.530302] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 723.597745] env[61648]: DEBUG nova.policy [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0ab99bb72a14593868586021ab3f515', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1cb2598a007b4973814e853c84cb4413', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 723.654523] env[61648]: DEBUG nova.network.neutron [-] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.802750] env[61648]: INFO nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Deleted allocations for instance a9d3592b-56f7-4823-bf0c-8b92ac4587bb [ 724.034202] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd 
tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 724.097598] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Successfully created port: 932c5e74-0d34-44ef-8bda-e68b9a72b607 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.160321] env[61648]: INFO nova.compute.manager [-] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Took 1.02 seconds to deallocate network for instance. [ 724.161718] env[61648]: DEBUG nova.compute.claims [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.162081] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.315547] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "a9d3592b-56f7-4823-bf0c-8b92ac4587bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.070s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.424596] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eb8e0d-c787-4398-889d-1d851a983f7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.438356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504e364b-7ce0-44ba-9d44-2ac55dcabab8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.472522] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1f7037-32e6-4396-92ff-e80db056c163 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.480226] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c788223-dd32-4fd8-952f-39c8c7bf3cf9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.493946] env[61648]: DEBUG nova.compute.provider_tree [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.822696] 
env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 724.998651] env[61648]: DEBUG nova.scheduler.client.report [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.044150] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 725.055043] env[61648]: DEBUG nova.compute.manager [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Received event network-changed-932c5e74-0d34-44ef-8bda-e68b9a72b607 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.055255] env[61648]: DEBUG nova.compute.manager [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Refreshing instance network info cache due to event network-changed-932c5e74-0d34-44ef-8bda-e68b9a72b607. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.055466] env[61648]: DEBUG oslo_concurrency.lockutils [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] Acquiring lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.055622] env[61648]: DEBUG oslo_concurrency.lockutils [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] Acquired lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.056210] env[61648]: DEBUG nova.network.neutron [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Refreshing network info cache for port 932c5e74-0d34-44ef-8bda-e68b9a72b607 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.083956] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.084234] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.084357] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.084573] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 725.085099] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.085327] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 
tempest-SecurityGroupsTestJSON-2025830300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.085483] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.085723] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.085860] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.085988] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.086176] env[61648]: DEBUG nova.virt.hardware [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.087320] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824277d5-7ba9-4e62-81da-3da3702662a7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.097109] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f35de8-adda-47a5-ab10-0c25803f4f87 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.351162] env[61648]: ERROR nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. 
[ 725.351162] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.351162] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.351162] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.351162] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.351162] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.351162] env[61648]: ERROR nova.compute.manager raise self.value [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.351162] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 725.351162] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.351162] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 725.351766] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.351766] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 725.351766] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. 
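The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises PortBindingFailed for the offending port. A hedged, self-contained sketch of that check follows; the exact trigger condition (Neutron marking the port's binding:vif_type as "binding_failed") is an assumption here, while the exception message and port id mirror the log entry above:

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port: dict) -> None:
        # Assumed condition: a failed binding is reported by Neutron as
        # binding:vif_type == "binding_failed" on the port.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # Example using the port id from the error record above.
    try:
        _ensure_no_port_binding_failure(
            {"id": "932c5e74-0d34-44ef-8bda-e68b9a72b607",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)

In the log, this exception propagates out of _allocate_network_async, aborts the spawn, and leads to the "Terminating instance" / claim-abort sequence that follows.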
[ 725.351766] env[61648]: ERROR nova.compute.manager [ 725.351766] env[61648]: Traceback (most recent call last): [ 725.351766] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 725.351766] env[61648]: listener.cb(fileno) [ 725.351766] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.351766] env[61648]: result = function(*args, **kwargs) [ 725.351766] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.351766] env[61648]: return func(*args, **kwargs) [ 725.351766] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.351766] env[61648]: raise e [ 725.351766] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.351766] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 725.351766] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.351766] env[61648]: created_port_ids = self._update_ports_for_instance( [ 725.351766] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.351766] env[61648]: with excutils.save_and_reraise_exception(): [ 725.351766] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.351766] env[61648]: self.force_reraise() [ 725.351766] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.351766] env[61648]: raise self.value [ 725.351766] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.351766] env[61648]: updated_port = self._update_port( [ 725.351766] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.351766] env[61648]: _ensure_no_port_binding_failure(port) [ 725.351766] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.351766] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 725.352830] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. [ 725.352830] env[61648]: Removing descriptor: 19 [ 725.352830] env[61648]: ERROR nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. 
[ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Traceback (most recent call last): [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] yield resources [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.driver.spawn(context, instance, image_meta, [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.352830] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] vm_ref = self.build_virtual_machine(instance, [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] for vif in network_info: [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self._sync_wrapper(fn, *args, **kwargs) [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.wait() [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self[:] = self._gt.wait() [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self._exit_event.wait() [ 725.353274] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 725.353751] env[61648]: ERROR 
nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] result = hub.switch() [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self.greenlet.switch() [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] result = function(*args, **kwargs) [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return func(*args, **kwargs) [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise e [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] nwinfo = self.network_api.allocate_for_instance( [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.353751] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] created_port_ids = self._update_ports_for_instance( [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] with excutils.save_and_reraise_exception(): [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.force_reraise() [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise self.value [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] updated_port = self._update_port( [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.354166] 
env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] _ensure_no_port_binding_failure(port) [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.354166] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise exception.PortBindingFailed(port_id=port['id']) [ 725.354945] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. [ 725.354945] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] [ 725.354945] env[61648]: INFO nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Terminating instance [ 725.355406] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.355547] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.367242] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquiring lock "e199f125-9259-4268-9aaf-1f4d10da9a34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.367520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "e199f125-9259-4268-9aaf-1f4d10da9a34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.503865] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.504542] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Start building networks 
asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.508479] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.650s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.508708] env[61648]: DEBUG nova.objects.instance [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61648) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 725.592466] env[61648]: DEBUG nova.network.neutron [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.674035] env[61648]: DEBUG nova.network.neutron [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.014025] env[61648]: DEBUG nova.compute.utils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.015332] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 726.015497] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 726.081544] env[61648]: DEBUG nova.policy [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22889f613f6746fb9dfefc04155d562b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba391231cb6445d4ae2cc683ca4e5bcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 726.177210] env[61648]: DEBUG oslo_concurrency.lockutils [req-a207895e-01c6-4aa8-a54b-39d090405db3 req-62619c02-f98a-4a9b-8e05-58224c2667ca service nova] Releasing lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.177664] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquired lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.177835] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.422110] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Successfully created port: 034e1507-1467-4dbc-a6d7-51b217eb67b5 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.518980] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.526686] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dbe8d509-22d0-4c0d-9af8-4fd33c06e387 tempest-ServersAdmin275Test-564011388 tempest-ServersAdmin275Test-564011388-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.527658] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.527s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.529182] env[61648]: INFO nova.compute.claims [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.699048] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.803822] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.101098] env[61648]: DEBUG nova.compute.manager [req-dc2553ea-68ba-48f1-bc8b-0611e5917707 req-c0e98530-9c10-4540-9b17-6394bbd7641c service nova] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Received event network-vif-deleted-932c5e74-0d34-44ef-8bda-e68b9a72b607 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.307346] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Releasing lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.307867] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 727.308380] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.311585] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9cc4fcbe-664f-45ea-8052-9fc5230b6ed3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.320763] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90115e9e-495d-4b27-a6d1-a26ae60fb03f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.348823] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a321a24-0f87-47e7-8364-5da5f6a65131 could not be found. [ 727.348823] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.348823] env[61648]: INFO nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Took 0.04 seconds to destroy the instance on the hypervisor. [ 727.348823] env[61648]: DEBUG oslo.service.loopingcall [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.348823] env[61648]: DEBUG nova.compute.manager [-] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 727.348823] env[61648]: DEBUG nova.network.neutron [-] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.365309] env[61648]: DEBUG nova.network.neutron [-] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.502332] env[61648]: ERROR nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. 
[ 727.502332] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.502332] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.502332] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.502332] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.502332] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.502332] env[61648]: ERROR nova.compute.manager raise self.value [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.502332] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.502332] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.502332] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.502888] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.502888] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.502888] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. 
[ 727.502888] env[61648]: ERROR nova.compute.manager [ 727.502888] env[61648]: Traceback (most recent call last): [ 727.502888] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.502888] env[61648]: listener.cb(fileno) [ 727.502888] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.502888] env[61648]: result = function(*args, **kwargs) [ 727.502888] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.502888] env[61648]: return func(*args, **kwargs) [ 727.502888] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.502888] env[61648]: raise e [ 727.502888] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.502888] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 727.502888] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.502888] env[61648]: created_port_ids = self._update_ports_for_instance( [ 727.502888] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.502888] env[61648]: with excutils.save_and_reraise_exception(): [ 727.502888] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.502888] env[61648]: self.force_reraise() [ 727.502888] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.502888] env[61648]: raise self.value [ 727.502888] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.502888] env[61648]: updated_port = self._update_port( [ 727.502888] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.502888] env[61648]: _ensure_no_port_binding_failure(port) [ 727.502888] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.502888] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.503780] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. [ 727.503780] env[61648]: Removing descriptor: 19 [ 727.532302] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.566221] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.566470] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.566645] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.566830] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.566973] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.567131] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.567389] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.567527] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 727.567801] env[61648]: DEBUG nova.virt.hardware [None 
req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.567904] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.568449] env[61648]: DEBUG nova.virt.hardware [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.569322] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c023d02-721e-473e-8da9-0a01aafb1b0d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.579504] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6427d506-19e3-4258-a03a-4bb43f475875 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.595375] env[61648]: ERROR nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. 
[ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Traceback (most recent call last): [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] yield resources [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.driver.spawn(context, instance, image_meta, [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] vm_ref = self.build_virtual_machine(instance, [ 727.595375] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] for vif in network_info: [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return self._sync_wrapper(fn, *args, **kwargs) [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.wait() [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self[:] = self._gt.wait() [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return self._exit_event.wait() [ 727.595767] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 727.595767] env[61648]: ERROR 
nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] current.throw(*self._exc) [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] result = function(*args, **kwargs) [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return func(*args, **kwargs) [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise e [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] nwinfo = self.network_api.allocate_for_instance( [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] created_port_ids = self._update_ports_for_instance( [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] with excutils.save_and_reraise_exception(): [ 727.596212] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.force_reraise() [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise self.value [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] updated_port = self._update_port( [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] _ensure_no_port_binding_failure(port) [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise exception.PortBindingFailed(port_id=port['id']) [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. [ 727.596657] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] [ 727.596657] env[61648]: INFO nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Terminating instance [ 727.599427] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquiring lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.599589] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquired lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.599751] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.869410] env[61648]: DEBUG nova.network.neutron [-] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.919315] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37c7602-fbd2-449a-a92d-5e6eec1467c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.927344] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c0e99-0a8d-4014-b144-25aac86d3909 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.961602] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de87d09c-aa94-4578-b065-dff1b7798ee2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.971338] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666809d3-da6e-4c55-808d-148f0c2b0a71 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.984786] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.121023] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.223742] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.377895] env[61648]: INFO nova.compute.manager [-] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Took 1.03 seconds to deallocate network for instance. [ 728.379030] env[61648]: DEBUG nova.compute.claims [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 728.379442] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.494018] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.730044] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Releasing lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.730044] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 728.730044] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.730044] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76252364-1f57-42a1-8acf-2bb88592e183 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.738394] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd66d0b1-ce74-42de-93d6-f9d80c56a7e4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.769018] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297 could not be found. [ 728.769018] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.769018] env[61648]: INFO nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Took 0.04 seconds to destroy the instance on the hypervisor. [ 728.769018] env[61648]: DEBUG oslo.service.loopingcall [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 728.769018] env[61648]: DEBUG nova.compute.manager [-] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 728.769018] env[61648]: DEBUG nova.network.neutron [-] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.789129] env[61648]: DEBUG nova.network.neutron [-] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.999445] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.999445] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 729.001358] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.047s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.001748] env[61648]: DEBUG nova.objects.instance [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lazy-loading 'resources' on Instance uuid 55630bdb-fe38-49dc-baa2-2ac5de20e569 {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.127288] env[61648]: DEBUG nova.compute.manager [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Received event network-changed-034e1507-1467-4dbc-a6d7-51b217eb67b5 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.127288] env[61648]: DEBUG nova.compute.manager [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Refreshing instance network info cache due to event network-changed-034e1507-1467-4dbc-a6d7-51b217eb67b5. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 729.127288] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] Acquiring lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.127288] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] Acquired lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.127288] env[61648]: DEBUG nova.network.neutron [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Refreshing network info cache for port 034e1507-1467-4dbc-a6d7-51b217eb67b5 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 729.291192] env[61648]: DEBUG nova.network.neutron [-] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.505256] env[61648]: DEBUG nova.compute.utils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.510810] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 729.512100] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 729.571307] env[61648]: DEBUG nova.policy [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab719482a69c4ba2b0725bb68a05930c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e97d3c0049d747fe80907ef09f3ed754', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 729.649292] env[61648]: DEBUG nova.network.neutron [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.727244] env[61648]: DEBUG nova.network.neutron [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.794478] env[61648]: INFO nova.compute.manager [-] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Took 1.03 seconds to deallocate network for instance. [ 729.795688] env[61648]: DEBUG nova.compute.claims [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 729.795688] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.847934] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b484014d-36f0-42a2-8735-a0956b7de704 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.860434] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968f2cc6-9016-4f4d-95c3-8cddb67c9583 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.867063] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Successfully created port: 21dfc9d8-7b33-46b1-a748-06f75cc65b71 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 729.901546] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83737c4-f137-4652-8a4b-b80ae60c1176 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.909544] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dc1648-fe36-4240-85a9-a3df69e6c6ae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.923958] env[61648]: DEBUG nova.compute.provider_tree [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.011330] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 730.232099] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] Releasing lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.232390] env[61648]: DEBUG nova.compute.manager [req-d7370e39-4347-4f26-add7-b234316db7a9 req-21c1e59a-246f-484c-859a-1c83a1f85f72 service nova] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Received event network-vif-deleted-034e1507-1467-4dbc-a6d7-51b217eb67b5 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 730.426892] env[61648]: DEBUG nova.scheduler.client.report [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.935024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.936484] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.868s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.970720] env[61648]: INFO nova.scheduler.client.report [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Deleted allocations for instance 55630bdb-fe38-49dc-baa2-2ac5de20e569 [ 731.022286] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 731.066129] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.066676] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.066753] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.066926] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.067142] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.067751] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.067751] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.067751] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.067921] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] 
Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.067960] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.068449] env[61648]: DEBUG nova.virt.hardware [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.069646] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e93a948-dc97-4757-93f9-23f15f6dea17 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.080753] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99eed94-72d6-4fd7-b79a-7c590795d6d8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.101346] env[61648]: ERROR nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. [ 731.101346] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.101346] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.101346] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.101346] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.101346] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.101346] env[61648]: ERROR nova.compute.manager raise self.value [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.101346] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 731.101346] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.101346] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 731.101902] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 731.101902] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 731.101902] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. [ 731.101902] env[61648]: ERROR nova.compute.manager [ 731.101902] env[61648]: Traceback (most recent call last): [ 731.101902] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 731.101902] env[61648]: listener.cb(fileno) [ 731.101902] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.101902] env[61648]: result = function(*args, **kwargs) [ 731.101902] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.101902] env[61648]: return func(*args, **kwargs) [ 731.101902] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.101902] env[61648]: raise e [ 731.101902] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.101902] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 731.101902] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.101902] env[61648]: created_port_ids = self._update_ports_for_instance( [ 731.101902] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.101902] env[61648]: with excutils.save_and_reraise_exception(): [ 731.101902] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.101902] env[61648]: self.force_reraise() [ 731.101902] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.101902] env[61648]: raise self.value [ 731.101902] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.101902] env[61648]: updated_port = self._update_port( [ 731.101902] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.101902] env[61648]: _ensure_no_port_binding_failure(port) [ 731.101902] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.101902] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 731.102729] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. [ 731.102729] env[61648]: Removing descriptor: 19 [ 731.102729] env[61648]: ERROR nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. 
[ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Traceback (most recent call last): [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] yield resources [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.driver.spawn(context, instance, image_meta, [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.102729] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] vm_ref = self.build_virtual_machine(instance, [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] for vif in network_info: [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self._sync_wrapper(fn, *args, **kwargs) [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.wait() [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self[:] = self._gt.wait() [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self._exit_event.wait() [ 731.103098] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 731.103483] env[61648]: ERROR 
nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] result = hub.switch() [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self.greenlet.switch() [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] result = function(*args, **kwargs) [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return func(*args, **kwargs) [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise e [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] nwinfo = self.network_api.allocate_for_instance( [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.103483] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] created_port_ids = self._update_ports_for_instance( [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] with excutils.save_and_reraise_exception(): [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.force_reraise() [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise self.value [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] updated_port = self._update_port( [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.103915] 
env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] _ensure_no_port_binding_failure(port) [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.103915] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise exception.PortBindingFailed(port_id=port['id']) [ 731.104270] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. [ 731.104270] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] [ 731.104270] env[61648]: INFO nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Terminating instance [ 731.105131] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.105131] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.105131] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.159926] env[61648]: DEBUG nova.compute.manager [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Received event network-changed-21dfc9d8-7b33-46b1-a748-06f75cc65b71 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.160175] env[61648]: DEBUG nova.compute.manager [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Refreshing instance network info cache due to event network-changed-21dfc9d8-7b33-46b1-a748-06f75cc65b71. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 731.160364] env[61648]: DEBUG oslo_concurrency.lockutils [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] Acquiring lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.485023] env[61648]: DEBUG oslo_concurrency.lockutils [None req-92a399cd-170d-4596-8a29-366bed44acde tempest-ServersAdmin275Test-1494233708 tempest-ServersAdmin275Test-1494233708-project-member] Lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.288s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.635183] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.754605] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.809679] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6809da-1bba-4085-ade4-ae2b1161abda {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.817619] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02584644-3a86-4fb6-b3b2-d8e176b6796e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.846399] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b492b731-8da6-4552-942e-f93eef4c506f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.852999] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a99ec9-6ca3-4152-a06e-70ffc65323e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.865732] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.257230] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.257691] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea 
tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 732.257876] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.258218] env[61648]: DEBUG oslo_concurrency.lockutils [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] Acquired lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.258390] env[61648]: DEBUG nova.network.neutron [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Refreshing network info cache for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.259481] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48bf9731-a5d0-4dad-bbed-d58fce2b7c04 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.268806] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4732c835-61d2-4bd7-8b34-7650ba99a235 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.291565] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 959931f5-eebc-4544-af88-ea231301b4a5 could not be found. [ 732.293246] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.293246] env[61648]: INFO nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 732.293246] env[61648]: DEBUG oslo.service.loopingcall [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.293246] env[61648]: DEBUG nova.compute.manager [-] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 732.293246] env[61648]: DEBUG nova.network.neutron [-] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.311644] env[61648]: DEBUG nova.network.neutron [-] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.368852] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.779970] env[61648]: DEBUG nova.network.neutron [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.815817] env[61648]: DEBUG nova.network.neutron [-] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.874464] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.938s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.875123] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. 
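Throughout these records, oslo_concurrency.lockutils wraps compute operations in named locks: the instance lock "55630bdb-fe38-49dc-baa2-2ac5de20e569" is released after being held 24.288s across terminate_instance, and "refresh_cache-<instance uuid>" locks guard the network-info cache refreshes; the "inner" frames at lockutils.py:402/407/421 point at lockutils' synchronized-style wrapper, which is also what emits the waited/held timings. Before the traceback records that expand the ERROR just above, here is a minimal sketch of that locking pattern with oslo.concurrency; the two functions are stand-ins, not Nova's ResourceTracker or ComputeManager code:

    import logging

    from oslo_concurrency import lockutils

    # Make lockutils' DEBUG acquire/release messages (like the ones in this log) visible.
    logging.basicConfig(level=logging.DEBUG)


    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Stand-in for the work the resource tracker does under the
        # "compute_resources" lock in the records above.
        pass


    def refresh_network_cache(instance_uuid):
        # Stand-in for a per-instance network-info cache refresh; the lock name
        # mirrors the "refresh_cache-<uuid>" locks seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass


    abort_instance_claim()
    refresh_network_cache('959931f5-eebc-4544-af88-ea231301b4a5')

The waited/held durations in the log come from that wrapper itself, which is why they show up uniformly no matter which compute method took the lock.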
[ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] Traceback (most recent call last): [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.driver.spawn(context, instance, image_meta, [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] vm_ref = self.build_virtual_machine(instance, [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.875123] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] for vif in network_info: [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self._sync_wrapper(fn, *args, **kwargs) [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.wait() [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self[:] = self._gt.wait() [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self._exit_event.wait() [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] result = hub.switch() [ 732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
732.875728] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return self.greenlet.switch() [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] result = function(*args, **kwargs) [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] return func(*args, **kwargs) [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise e [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] nwinfo = self.network_api.allocate_for_instance( [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] created_port_ids = self._update_ports_for_instance( [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] with excutils.save_and_reraise_exception(): [ 732.876509] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] self.force_reraise() [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise self.value [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] updated_port = self._update_port( [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] _ensure_no_port_binding_failure(port) [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] raise exception.PortBindingFailed(port_id=port['id']) [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] nova.exception.PortBindingFailed: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. [ 732.877377] env[61648]: ERROR nova.compute.manager [instance: b07648a0-23a5-4dee-9582-ce393292b768] [ 732.878243] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 732.878243] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.993s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.881453] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Build of instance b07648a0-23a5-4dee-9582-ce393292b768 was re-scheduled: Binding failed for port a464193c-7358-4c31-8701-09510a31be57, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 732.881932] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 732.882181] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.882327] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.882481] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.887018] env[61648]: DEBUG nova.network.neutron [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.318476] env[61648]: INFO nova.compute.manager [-] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Took 1.03 seconds to deallocate network for instance. 
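The PortBindingFailed traceback above bottoms out in nova/network/neutron.py, where _update_port calls _ensure_no_port_binding_failure and that helper raises exception.PortBindingFailed(port_id=port['id']). A minimal self-contained sketch of that style of check follows; the exception class is a stand-in for nova.exception.PortBindingFailed, and the assumption that the check keys off the port's binding:vif_type field (reported as 'binding_failed' when Neutron cannot bind the port) is illustrative rather than a copy of Nova's code:

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed; the message format
        # matches the ERROR records above.
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)


    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a failed binding on the port itself via
        # 'binding:vif_type', so the caller only needs to inspect the returned port.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # A port as Neutron might return it after a failed binding, reusing the port
    # ID from the traceback above.
    port = {'id': 'a464193c-7358-4c31-8701-09510a31be57',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Because the exception escapes _update_ports_for_instance through save_and_reraise_exception and is re-raised out of _allocate_network_async, the build fails, and the "Build of instance ... was re-scheduled" and VIF-unplugging records above follow directly from it.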
[ 733.320927] env[61648]: DEBUG nova.compute.claims [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 733.321188] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.390425] env[61648]: DEBUG oslo_concurrency.lockutils [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] Releasing lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.390425] env[61648]: DEBUG nova.compute.manager [req-dc262b86-53f7-4ee1-a4fa-60df962a5837 req-c981cbef-c4e1-4339-a087-62c41219ec86 service nova] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Received event network-vif-deleted-21dfc9d8-7b33-46b1-a748-06f75cc65b71 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.412513] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.579395] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.769245] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95e632d-0b60-4714-aae5-39faa06bbd13 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.777064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9224cfed-ee37-434d-9afc-a301fb80664a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.826645] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c372ac-399b-461b-a6e7-03ac23501919 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.838250] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7a1abe-b011-461f-bc04-3ff40035d5fb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.852979] env[61648]: DEBUG nova.compute.provider_tree [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed in ProviderTree for provider: 
1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.082492] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-b07648a0-23a5-4dee-9582-ce393292b768" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.082729] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 734.082920] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.083133] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.101824] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.356291] env[61648]: DEBUG nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.606727] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.861986] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.985s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.863938] env[61648]: ERROR nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. 
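The traceback for the e9d5a8b8 failure follows in the next records. Alongside these failures, nova.scheduler.client.report keeps logging "Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0" with the same VCPU/MEMORY_MB/DISK_GB payload each time. The sketch below only reproduces that payload shape and the trivial "did anything change" comparison; inventory_changed() is a hypothetical helper, and the real report client also deals with provider generations and the Placement API rather than a bare equality check:

    # Illustrative only: mirrors the inventory payload logged for provider
    # 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0; inventory_changed() is a hypothetical
    # helper, not Nova's SchedulerReportClient.
    PROVIDER = '1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0'

    proposed = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }


    def inventory_changed(current, new):
        # True when the provider's stored inventory differs from the proposed one.
        return current != new


    current = dict(proposed)  # pretend Placement already holds the same inventory
    if not inventory_changed(current, proposed):
        print('Inventory has not changed for provider %s' % PROVIDER)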
[ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Traceback (most recent call last): [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.driver.spawn(context, instance, image_meta, [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] vm_ref = self.build_virtual_machine(instance, [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] vif_infos = vmwarevif.get_vif_info(self._session, [ 734.863938] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] for vif in network_info: [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self._sync_wrapper(fn, *args, **kwargs) [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.wait() [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self[:] = self._gt.wait() [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self._exit_event.wait() [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] result = hub.switch() [ 734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
734.864754] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return self.greenlet.switch() [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] result = function(*args, **kwargs) [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] return func(*args, **kwargs) [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise e [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] nwinfo = self.network_api.allocate_for_instance( [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] created_port_ids = self._update_ports_for_instance( [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] with excutils.save_and_reraise_exception(): [ 734.866613] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] self.force_reraise() [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise self.value [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] updated_port = self._update_port( [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] _ensure_no_port_binding_failure(port) [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] raise exception.PortBindingFailed(port_id=port['id']) [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] nova.exception.PortBindingFailed: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. [ 734.867044] env[61648]: ERROR nova.compute.manager [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] [ 734.867382] env[61648]: DEBUG nova.compute.utils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 734.867872] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.763s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.870675] env[61648]: INFO nova.compute.claims [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.873949] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Build of instance e9d5a8b8-afc2-40dc-b480-0b946e085e18 was re-scheduled: Binding failed for port fe518c10-5f4a-4f0d-953b-14595a2cca7d, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 734.877076] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 734.877076] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquiring lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.877076] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Acquired lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.877076] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.111379] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: b07648a0-23a5-4dee-9582-ce393292b768] Took 1.03 seconds to deallocate network for instance. [ 735.407620] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.513106] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.018969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Releasing lock "refresh_cache-e9d5a8b8-afc2-40dc-b480-0b946e085e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.018969] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 736.018969] env[61648]: DEBUG nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 736.018969] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.055848] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.143723] env[61648]: INFO nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Deleted allocations for instance b07648a0-23a5-4dee-9582-ce393292b768 [ 736.288488] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af8c52b-b0e5-4331-a5df-d51ed3306302 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.297090] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f13b158-6d17-4a24-8730-182eb8dbc64b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.334480] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a82006-e5cc-4380-9b71-7f46843b9f93 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.342199] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e43cda5-b581-4584-a240-2785fc256f36 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.355862] env[61648]: DEBUG nova.compute.provider_tree [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.559115] env[61648]: DEBUG nova.network.neutron [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.654418] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 
tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "b07648a0-23a5-4dee-9582-ce393292b768" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 161.383s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.859138] env[61648]: DEBUG nova.scheduler.client.report [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.062465] env[61648]: INFO nova.compute.manager [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] [instance: e9d5a8b8-afc2-40dc-b480-0b946e085e18] Took 1.04 seconds to deallocate network for instance. [ 737.157384] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.364056] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.367119] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 737.368413] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.033s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.370581] env[61648]: INFO nova.compute.claims [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.517590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "c399cd8d-6cad-43d8-9226-36f9d9c247e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.518061] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "c399cd8d-6cad-43d8-9226-36f9d9c247e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.686406] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.877275] env[61648]: DEBUG nova.compute.utils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.882188] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 737.882188] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.940077] env[61648]: DEBUG nova.policy [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 738.096307] env[61648]: INFO nova.scheduler.client.report [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Deleted allocations for instance e9d5a8b8-afc2-40dc-b480-0b946e085e18 [ 738.298107] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Successfully created port: d5cf109b-2ef4-4737-8883-e77a128d2dcb {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.380875] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 738.608577] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e5960bd2-f241-480d-9fcd-9f27397b725b tempest-ListServersNegativeTestJSON-1533546085 tempest-ListServersNegativeTestJSON-1533546085-project-member] Lock "e9d5a8b8-afc2-40dc-b480-0b946e085e18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.302s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.736145] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2effb3-106d-4d60-9bc3-fac3e68692a0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.744180] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d83e01-c596-4e5c-a69e-78c28940777c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.774534] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b586a2-51b8-4bec-bb64-4f9956f88b71 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.783189] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cec3a6a-3a43-4711-9ede-76e9db0778ff {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.798739] env[61648]: DEBUG nova.compute.provider_tree [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.111194] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 739.251846] env[61648]: DEBUG nova.compute.manager [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Received event network-changed-d5cf109b-2ef4-4737-8883-e77a128d2dcb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.252909] env[61648]: DEBUG nova.compute.manager [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Refreshing instance network info cache due to event network-changed-d5cf109b-2ef4-4737-8883-e77a128d2dcb. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 739.252909] env[61648]: DEBUG oslo_concurrency.lockutils [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] Acquiring lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.252909] env[61648]: DEBUG oslo_concurrency.lockutils [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] Acquired lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.252909] env[61648]: DEBUG nova.network.neutron [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Refreshing network info cache for port d5cf109b-2ef4-4737-8883-e77a128d2dcb {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.304541] env[61648]: DEBUG nova.scheduler.client.report [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.355476] env[61648]: ERROR nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. 
[ 739.355476] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.355476] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 739.355476] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 739.355476] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.355476] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.355476] env[61648]: ERROR nova.compute.manager raise self.value [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 739.355476] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 739.355476] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.355476] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 739.356037] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.356037] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 739.356037] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. 
[ 739.356037] env[61648]: ERROR nova.compute.manager [ 739.356037] env[61648]: Traceback (most recent call last): [ 739.356037] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 739.356037] env[61648]: listener.cb(fileno) [ 739.356037] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.356037] env[61648]: result = function(*args, **kwargs) [ 739.356037] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 739.356037] env[61648]: return func(*args, **kwargs) [ 739.356037] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.356037] env[61648]: raise e [ 739.356037] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.356037] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 739.356037] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 739.356037] env[61648]: created_port_ids = self._update_ports_for_instance( [ 739.356037] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 739.356037] env[61648]: with excutils.save_and_reraise_exception(): [ 739.356037] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.356037] env[61648]: self.force_reraise() [ 739.356037] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.356037] env[61648]: raise self.value [ 739.356037] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 739.356037] env[61648]: updated_port = self._update_port( [ 739.356037] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.356037] env[61648]: _ensure_no_port_binding_failure(port) [ 739.356037] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.356037] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 739.357057] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. [ 739.357057] env[61648]: Removing descriptor: 19 [ 739.392644] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 739.430786] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.431032] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.431247] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.431436] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.431576] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.431715] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.431923] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.433016] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 739.433016] env[61648]: DEBUG nova.virt.hardware [None 
req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.433016] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.433016] env[61648]: DEBUG nova.virt.hardware [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.433495] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e16056b-eb4c-4950-8466-e9984cbba5e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.441728] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e493097-3254-46fc-b21e-88f67f3c09cb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.456472] env[61648]: ERROR nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. 
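[editor's note] The frames above bottom out in nova/network/neutron.py:_ensure_no_port_binding_failure(), which is where a Neutron port whose binding failed gets converted into nova.exception.PortBindingFailed. A minimal, illustrative sketch of that kind of guard follows; the exact field consulted ('binding:vif_type' set to 'binding_failed') is an assumption for the example and is not visible in this log.

# Illustrative sketch only, not the exact Nova source: check a port dict as
# returned by the Neutron API and refuse to proceed if its binding failed.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed Neutron sentinel value

def ensure_no_port_binding_failure(port):
    # Neutron marks the port this way when no mechanism driver could bind it
    # (no agent on the host, dead agent, misconfigured physnet, ...).
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example: a port shaped like the one that failed in the log above.
port = {'id': 'd5cf109b-2ef4-4737-8883-e77a128d2dcb',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)   # "Binding failed for port d5cf109b-..., please check neutron logs ..."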
[ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Traceback (most recent call last): [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] yield resources [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.driver.spawn(context, instance, image_meta, [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] vm_ref = self.build_virtual_machine(instance, [ 739.456472] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] for vif in network_info: [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return self._sync_wrapper(fn, *args, **kwargs) [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.wait() [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self[:] = self._gt.wait() [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return self._exit_event.wait() [ 739.456858] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 739.456858] env[61648]: ERROR 
nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] current.throw(*self._exc) [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] result = function(*args, **kwargs) [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return func(*args, **kwargs) [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise e [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] nwinfo = self.network_api.allocate_for_instance( [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] created_port_ids = self._update_ports_for_instance( [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] with excutils.save_and_reraise_exception(): [ 739.457216] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.force_reraise() [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise self.value [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] updated_port = self._update_port( [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] _ensure_no_port_binding_failure(port) [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise exception.PortBindingFailed(port_id=port['id']) [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. [ 739.457570] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] [ 739.457570] env[61648]: INFO nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Terminating instance [ 739.458883] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.641447] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.809832] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.810379] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 739.813211] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.414s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.836508] env[61648]: DEBUG nova.network.neutron [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.965921] env[61648]: DEBUG nova.network.neutron [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.318169] env[61648]: DEBUG nova.compute.utils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.319527] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 740.319699] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 740.396318] env[61648]: DEBUG nova.policy [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '787d9130823549909ab3df06868bb3af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2154e6782ba43d1b1304d2b07ce91a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 740.472259] env[61648]: DEBUG oslo_concurrency.lockutils [req-c53839dd-9b6f-4e1b-9de4-03590981bd00 req-98fc4ab3-0cff-45c9-991e-43520363e0b8 service nova] Releasing lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.473658] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.473658] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.704818] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ae3110-36cd-476f-92e6-327f91277381 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.712821] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0863c126-7cb4-4ced-b202-5c9444868a68 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.744356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efafaa6-6c53-40f7-b117-835a025f3021 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.751823] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96509caa-7df1-4148-b4d9-d7aca5ecdd9f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.766412] env[61648]: DEBUG nova.compute.provider_tree [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.823427] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 740.952535] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Successfully created port: a6bd2153-910b-4f71-acb0-1f550dc324b8 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.999505] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.108225] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.271628] env[61648]: DEBUG nova.scheduler.client.report [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.296361] env[61648]: DEBUG nova.compute.manager [req-9f949f11-6c1d-4eb8-829c-f919154a5158 req-71d7b6fa-5a86-4753-ae73-2d16ae7e3cb2 service nova] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Received event network-vif-deleted-d5cf109b-2ef4-4737-8883-e77a128d2dcb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.616020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.616020] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 741.616020] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 741.616020] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb8265af-bdde-4e1b-a223-99684d61cc56 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.627800] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a064dfc6-cbce-4c98-8f79-bf82eeb04957 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.654009] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 831da774-5e37-4d49-a1fd-3eb421c7fcb7 could not be found. [ 741.654267] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.654458] env[61648]: INFO nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 741.654701] env[61648]: DEBUG oslo.service.loopingcall [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.654909] env[61648]: DEBUG nova.compute.manager [-] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 741.655080] env[61648]: DEBUG nova.network.neutron [-] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.672504] env[61648]: DEBUG nova.network.neutron [-] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.775219] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.776476] env[61648]: ERROR nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Traceback (most recent call last): [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.driver.spawn(context, instance, image_meta, [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] vm_ref = self.build_virtual_machine(instance, [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.776476] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] for vif in network_info: [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return self._sync_wrapper(fn, *args, **kwargs) [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.wait() [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 741.777077] env[61648]: ERROR 
nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self[:] = self._gt.wait() [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return self._exit_event.wait() [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] current.throw(*self._exc) [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.777077] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] result = function(*args, **kwargs) [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] return func(*args, **kwargs) [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise e [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] nwinfo = self.network_api.allocate_for_instance( [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] created_port_ids = self._update_ports_for_instance( [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] with excutils.save_and_reraise_exception(): [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] self.force_reraise() [ 741.777837] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise self.value [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] updated_port = self._update_port( [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] _ensure_no_port_binding_failure(port) [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] raise exception.PortBindingFailed(port_id=port['id']) [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] nova.exception.PortBindingFailed: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. [ 741.778839] env[61648]: ERROR nova.compute.manager [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] [ 741.778839] env[61648]: DEBUG nova.compute.utils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 741.779306] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.016s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.779306] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.779306] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 741.779306] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.617s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.782387] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Build of instance a1a8b990-f4b7-4049-9345-562d1b5c180e was re-scheduled: Binding failed for port 2d1d56a9-747b-4cd4-bdf9-ed127ae0ce8c, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 741.782803] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 741.783075] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquiring lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.783169] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Acquired lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.783363] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.784932] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f1078f-d723-4619-97bb-a2a0144ad746 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.801855] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de029fc3-35e9-47e6-8939-3ff0894b4120 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.818169] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461ec949-ff1b-4ddc-acb9-9c8da3f8079d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.824931] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4982e9a-9a2d-419d-a848-061a468fd6a0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.833379] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.860670] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181436MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 741.860670] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.885238] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.885883] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.885883] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.885883] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.886112] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.886112] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.886289] env[61648]: DEBUG nova.virt.hardware 
[None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.886445] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.886605] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.886763] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.887023] env[61648]: DEBUG nova.virt.hardware [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.888462] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09dea35-3925-49e2-add3-d75dac446ea0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.900809] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b017c104-6b75-48aa-84df-05c0829b8884 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.175189] env[61648]: DEBUG nova.network.neutron [-] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.234939] env[61648]: ERROR nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. 
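[editor's note] The nova.virt.hardware lines above repeat the topology negotiation already seen for instance 831da774: with flavor and image limits of 0:0:0 (unset) and a single vCPU, the only candidate is 1 socket x 1 core x 1 thread. The toy sketch below shows the general idea under the simplifying assumption that candidates are just factorizations of the vCPU count capped by per-dimension maximums; it is not Nova's actual _get_possible_cpu_topologies implementation.

# Toy sketch: enumerate (sockets, cores, threads) combinations whose product
# equals the vCPU count and which respect optional per-dimension maximums.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topos.append((sockets, cores, threads))
    return topos

# m1.nano has 1 vCPU and no limits, so only one topology survives, matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.
print(possible_topologies(1))   # [(1, 1, 1)]
print(possible_topologies(4))   # includes (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...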
[ 742.234939] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.234939] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.234939] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.234939] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.234939] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.234939] env[61648]: ERROR nova.compute.manager raise self.value [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.234939] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 742.234939] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.234939] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 742.235630] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.235630] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 742.235630] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. 
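[editor's note] Every traceback in this run passes through oslo_utils.excutils.save_and_reraise_exception(), which is why force_reraise() and "raise self.value" keep appearing between the Nova frames: the context manager lets cleanup code run inside an except block and then re-raises the original exception unchanged. A small usage sketch, with _rollback_ports() as a hypothetical stand-in for whatever cleanup the caller needs (it is not a Nova function):

from oslo_utils import excutils

def _rollback_ports(port_ids):
    # Hypothetical cleanup helper for this sketch: undo work done before the
    # failure so resources are not leaked.
    for port_id in port_ids:
        print('cleaning up port %s' % port_id)

def update_ports(port_ids):
    try:
        raise RuntimeError('simulated binding failure')   # stand-in for the Neutron error
    except Exception:
        # Cleanup runs here; on leaving the with-block the *original* exception
        # is re-raised with its traceback intact -- the force_reraise() frames
        # seen in the log.
        with excutils.save_and_reraise_exception():
            _rollback_ports(port_ids)

try:
    update_ports(['a6bd2153-910b-4f71-acb0-1f550dc324b8'])
except RuntimeError as exc:
    print('original exception preserved: %s' % exc)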
[ 742.235630] env[61648]: ERROR nova.compute.manager [ 742.235630] env[61648]: Traceback (most recent call last): [ 742.235630] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 742.235630] env[61648]: listener.cb(fileno) [ 742.235630] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.235630] env[61648]: result = function(*args, **kwargs) [ 742.235630] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.235630] env[61648]: return func(*args, **kwargs) [ 742.235630] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.235630] env[61648]: raise e [ 742.235630] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.235630] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 742.235630] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.235630] env[61648]: created_port_ids = self._update_ports_for_instance( [ 742.235630] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.235630] env[61648]: with excutils.save_and_reraise_exception(): [ 742.235630] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.235630] env[61648]: self.force_reraise() [ 742.235630] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.235630] env[61648]: raise self.value [ 742.235630] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.235630] env[61648]: updated_port = self._update_port( [ 742.235630] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.235630] env[61648]: _ensure_no_port_binding_failure(port) [ 742.235630] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.235630] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 742.237388] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. [ 742.237388] env[61648]: Removing descriptor: 19 [ 742.237388] env[61648]: ERROR nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. 
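[editor's note] The nova/network/model.py frames in these tracebacks show why the Neutron failure only surfaces once the driver starts building VIF info: allocation runs in a background greenthread, and the network_info object blocks on self._gt.wait() the first time it is iterated, at which point the stored exception is re-raised. Below is a much-reduced sketch of that deferred-result pattern using eventlet directly; the AsyncResult class is illustrative only and is not Nova's NetworkInfoAsyncWrapper.

import eventlet

class AsyncResult:
    """Illustrative deferred result: runs work in a greenthread, resolves lazily."""

    def __init__(self, func, *args):
        self._gt = eventlet.spawn(func, *args)
        self._result = None
        self._resolved = False

    def wait(self):
        if not self._resolved:
            # GreenThread.wait() re-raises any exception the greenthread raised,
            # which is how the binding failure reappears at spawn time.
            self._result = self._gt.wait()
            self._resolved = True
        return self._result

def allocate_network(port_id):
    raise RuntimeError('Binding failed for port %s' % port_id)

nwinfo = AsyncResult(allocate_network, 'a6bd2153-910b-4f71-acb0-1f550dc324b8')
try:
    nwinfo.wait()   # the error surfaces here, far from where it actually happened
except RuntimeError as exc:
    print('surfaced during spawn: %s' % exc)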
[ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Traceback (most recent call last): [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] yield resources [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.driver.spawn(context, instance, image_meta, [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.237388] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] vm_ref = self.build_virtual_machine(instance, [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] for vif in network_info: [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self._sync_wrapper(fn, *args, **kwargs) [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.wait() [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self[:] = self._gt.wait() [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self._exit_event.wait() [ 742.238176] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.238814] env[61648]: ERROR 
nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] result = hub.switch() [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self.greenlet.switch() [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] result = function(*args, **kwargs) [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return func(*args, **kwargs) [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise e [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] nwinfo = self.network_api.allocate_for_instance( [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.238814] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] created_port_ids = self._update_ports_for_instance( [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] with excutils.save_and_reraise_exception(): [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.force_reraise() [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise self.value [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] updated_port = self._update_port( [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.239365] 
env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] _ensure_no_port_binding_failure(port) [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.239365] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise exception.PortBindingFailed(port_id=port['id']) [ 742.239775] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. [ 742.239775] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] [ 742.239775] env[61648]: INFO nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Terminating instance [ 742.239775] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.239775] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquired lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.239775] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.317456] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.395751] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.625680] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6f370a-ce58-4050-9318-54861ebf3f4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.633979] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75a9643-3fa3-4943-a841-e6f13752d1a7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.672272] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb8b00f-cff3-407c-9d8c-40ca6bbc62fe {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.679479] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffa2cf8-0b93-474b-923d-6fb4bce3acc3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.683389] env[61648]: INFO nova.compute.manager [-] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Took 1.03 seconds to deallocate network for instance. [ 742.685641] env[61648]: DEBUG nova.compute.claims [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 742.685817] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.694219] env[61648]: DEBUG nova.compute.provider_tree [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.760145] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.878112] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.898859] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Releasing lock "refresh_cache-a1a8b990-f4b7-4049-9345-562d1b5c180e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.899378] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 742.899693] env[61648]: DEBUG nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.899977] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.926611] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.196967] env[61648]: DEBUG nova.scheduler.client.report [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.356923] env[61648]: DEBUG nova.compute.manager [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Received event network-changed-a6bd2153-910b-4f71-acb0-1f550dc324b8 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 743.357148] env[61648]: DEBUG nova.compute.manager [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Refreshing instance network info cache due to event network-changed-a6bd2153-910b-4f71-acb0-1f550dc324b8. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 743.357340] env[61648]: DEBUG oslo_concurrency.lockutils [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] Acquiring lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.385190] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Releasing lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.385629] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 743.385815] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.386130] env[61648]: DEBUG oslo_concurrency.lockutils [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] Acquired lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.386303] env[61648]: DEBUG nova.network.neutron [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Refreshing network info cache for port a6bd2153-910b-4f71-acb0-1f550dc324b8 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.387437] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97aaedc8-bfeb-496a-a692-30d6488afdeb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.397553] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4416b4c3-7a3f-4fe4-a107-ce86cc529896 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.420790] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 14854fd0-680a-48a2-b1d6-50e75624aef2 could not be found. [ 743.421014] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.421199] env[61648]: INFO nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 743.421431] env[61648]: DEBUG oslo.service.loopingcall [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.421687] env[61648]: DEBUG nova.compute.manager [-] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.421787] env[61648]: DEBUG nova.network.neutron [-] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.428170] env[61648]: DEBUG nova.network.neutron [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.440817] env[61648]: DEBUG nova.network.neutron [-] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.701590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.922s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.702206] env[61648]: ERROR nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. 
[ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Traceback (most recent call last): [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.driver.spawn(context, instance, image_meta, [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] vm_ref = self.build_virtual_machine(instance, [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.702206] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] for vif in network_info: [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self._sync_wrapper(fn, *args, **kwargs) [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.wait() [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self[:] = self._gt.wait() [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self._exit_event.wait() [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] result = hub.switch() [ 743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
743.702781] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return self.greenlet.switch() [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] result = function(*args, **kwargs) [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] return func(*args, **kwargs) [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise e [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] nwinfo = self.network_api.allocate_for_instance( [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] created_port_ids = self._update_ports_for_instance( [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] with excutils.save_and_reraise_exception(): [ 743.703433] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] self.force_reraise() [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise self.value [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] updated_port = self._update_port( [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] _ensure_no_port_binding_failure(port) [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] raise exception.PortBindingFailed(port_id=port['id']) [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] nova.exception.PortBindingFailed: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. [ 743.704050] env[61648]: ERROR nova.compute.manager [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] [ 743.704637] env[61648]: DEBUG nova.compute.utils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 743.704637] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.349s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.706218] env[61648]: INFO nova.compute.claims [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.709813] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Build of instance a978aa73-3f2a-4a87-bda3-bcde3028a646 was re-scheduled: Binding failed for port d03e3d83-a6c1-4354-b3ca-0a3681a209b4, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 743.710265] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 743.710485] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquiring lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.710629] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Acquired lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.710785] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 743.906422] env[61648]: DEBUG nova.network.neutron [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.931064] env[61648]: INFO nova.compute.manager [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] [instance: a1a8b990-f4b7-4049-9345-562d1b5c180e] Took 1.03 seconds to deallocate network for instance. [ 743.944599] env[61648]: DEBUG nova.network.neutron [-] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.006304] env[61648]: DEBUG nova.network.neutron [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.230305] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.318030] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.447561] env[61648]: INFO nova.compute.manager [-] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Took 1.03 seconds to deallocate network for instance. [ 744.449699] env[61648]: DEBUG nova.compute.claims [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.449878] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.508808] env[61648]: DEBUG oslo_concurrency.lockutils [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] Releasing lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.509148] env[61648]: DEBUG nova.compute.manager [req-3acbda77-c85d-489c-b0ca-832826c4b138 req-ccc677a0-5e69-4cb3-9e73-c9b12eeecbb4 service nova] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Received event network-vif-deleted-a6bd2153-910b-4f71-acb0-1f550dc324b8 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 744.821587] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Releasing lock "refresh_cache-a978aa73-3f2a-4a87-bda3-bcde3028a646" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.821865] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 744.824018] env[61648]: DEBUG nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 744.824018] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.838091] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.966764] env[61648]: INFO nova.scheduler.client.report [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Deleted allocations for instance a1a8b990-f4b7-4049-9345-562d1b5c180e [ 745.118261] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93505b2b-018e-420a-8155-d0f82007faf1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.125795] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e530260e-f9b4-47c8-8188-2348734de024 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.156642] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430b8378-52aa-424f-b112-1a7432f0e95f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.163967] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b74ad06-e146-4da2-b54d-10ac644857de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.177009] env[61648]: DEBUG nova.compute.provider_tree [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.343043] env[61648]: DEBUG nova.network.neutron [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.478014] env[61648]: DEBUG oslo_concurrency.lockutils [None req-295655bb-e611-472f-9efb-eaecf116ea0a tempest-ServersTestFqdnHostnames-1990258026 tempest-ServersTestFqdnHostnames-1990258026-project-member] Lock 
"a1a8b990-f4b7-4049-9345-562d1b5c180e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 169.910s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.680418] env[61648]: DEBUG nova.scheduler.client.report [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.848081] env[61648]: INFO nova.compute.manager [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] [instance: a978aa73-3f2a-4a87-bda3-bcde3028a646] Took 1.02 seconds to deallocate network for instance. [ 745.985198] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 746.186793] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.186793] env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.194222] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.812s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.505177] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.696536] env[61648]: DEBUG nova.compute.utils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.702947] env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Not allocating networking since 'none' was specified. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 746.877803] env[61648]: INFO nova.scheduler.client.report [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Deleted allocations for instance a978aa73-3f2a-4a87-bda3-bcde3028a646 [ 747.023505] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa82b9c8-63e6-4263-9981-ae29506c4340 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.031971] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e85eca-f546-4fac-8467-d9ea7ffd2ba4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.068595] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742e2806-92a0-4ac4-a423-e5b5186a9374 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.076916] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0a9b4a-33c6-4dda-b1a3-55a08dcb3958 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.094455] env[61648]: DEBUG nova.compute.provider_tree [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.204245] env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: 
b9130bac-f92b-4208-b84c-852f4a269153] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.392414] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ecaab5ab-5d16-40bd-868d-0f06ec9fcc7d tempest-ImagesNegativeTestJSON-691148915 tempest-ImagesNegativeTestJSON-691148915-project-member] Lock "a978aa73-3f2a-4a87-bda3-bcde3028a646" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 171.704s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.600779] env[61648]: DEBUG nova.scheduler.client.report [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.897862] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 748.106715] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.915s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.107416] env[61648]: ERROR nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. 
[ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Traceback (most recent call last): [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.driver.spawn(context, instance, image_meta, [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] vm_ref = self.build_virtual_machine(instance, [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.107416] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] for vif in network_info: [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self._sync_wrapper(fn, *args, **kwargs) [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.wait() [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self[:] = self._gt.wait() [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self._exit_event.wait() [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] result = hub.switch() [ 748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
748.108231] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return self.greenlet.switch() [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] result = function(*args, **kwargs) [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] return func(*args, **kwargs) [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise e [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] nwinfo = self.network_api.allocate_for_instance( [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] created_port_ids = self._update_ports_for_instance( [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] with excutils.save_and_reraise_exception(): [ 748.108575] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] self.force_reraise() [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise self.value [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] updated_port = self._update_port( [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] _ensure_no_port_binding_failure(port) [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] raise exception.PortBindingFailed(port_id=port['id']) [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] nova.exception.PortBindingFailed: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. [ 748.109474] env[61648]: ERROR nova.compute.manager [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] [ 748.110413] env[61648]: DEBUG nova.compute.utils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.110413] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.314s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.112873] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Build of instance 0a321a24-0f87-47e7-8364-5da5f6a65131 was re-scheduled: Binding failed for port 932c5e74-0d34-44ef-8bda-e68b9a72b607, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 748.113550] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 748.113610] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquiring lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.113752] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Acquired lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.113924] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.215615] env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.253877] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.255539] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.255728] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.255920] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.256088] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.256233] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.256436] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.256589] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.256744] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 
tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.256896] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.257117] env[61648]: DEBUG nova.virt.hardware [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.257991] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9419b0a-e46d-4579-9fad-23f55df91126 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.268962] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f38536-a479-4f1f-855e-d2f305710aac {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.283760] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.289964] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Creating folder: Project (37b4b3d8247b47a4b0656290af27e54d). Parent ref: group-v285225. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.290281] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-652c42d0-975d-4830-bad0-002f340e9184 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.304701] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Created folder: Project (37b4b3d8247b47a4b0656290af27e54d) in parent group-v285225. [ 748.304701] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Creating folder: Instances. Parent ref: group-v285242. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.304701] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36719dcb-5728-4879-9972-626354946f18 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.312561] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Created folder: Instances in parent group-v285242. 
[ 748.312813] env[61648]: DEBUG oslo.service.loopingcall [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.313015] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.313289] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9604e06-0700-4180-b72a-46cbce1cc90b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.331486] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.331486] env[61648]: value = "task-1336656" [ 748.331486] env[61648]: _type = "Task" [ 748.331486] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.338659] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336656, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.434112] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.647135] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.809908] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.846941] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336656, 'name': CreateVM_Task, 'duration_secs': 0.277968} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.849629] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.850115] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.850276] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.850586] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 748.850971] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c722bc46-fe90-4f1c-b037-05fac52ea2ce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.859588] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 748.859588] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]527ebded-3d29-0dd3-470f-76e9f50731dd" [ 748.859588] env[61648]: _type = "Task" [ 748.859588] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.873984] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]527ebded-3d29-0dd3-470f-76e9f50731dd, 'name': SearchDatastore_Task, 'duration_secs': 0.015491} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.874714] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.874714] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.875234] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.875234] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.875234] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.875442] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6558e277-1024-4e77-88dc-0410bcebacdd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.887720] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.887905] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.888876] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fa402d1-9f3e-4616-bf5d-917cadc3c449 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.893755] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 748.893755] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f7cfd5-2d50-edf0-65a4-6f2c3c964139" [ 748.893755] env[61648]: _type = "Task" [ 748.893755] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.902569] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f7cfd5-2d50-edf0-65a4-6f2c3c964139, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.985137] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115a80a0-9095-4a67-9fc5-b9d27d1aec5b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.993976] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6076bc-916f-4704-a8fe-25e80eaa6e4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.024356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f8a48d-f3e2-4be9-bc26-43ad865076b9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.031565] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd6c565-067f-4664-98e0-31943dd311e2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.044828] env[61648]: DEBUG nova.compute.provider_tree [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.312429] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Releasing lock "refresh_cache-0a321a24-0f87-47e7-8364-5da5f6a65131" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.312689] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 749.313241] env[61648]: DEBUG nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.313241] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 749.405029] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52f7cfd5-2d50-edf0-65a4-6f2c3c964139, 'name': SearchDatastore_Task, 'duration_secs': 0.015275} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.405029] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dcaf1f-9036-4f84-bc86-c6f596d9759f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.410096] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 749.410096] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e66334-ba78-1f28-5d00-87f5c5672627" [ 749.410096] env[61648]: _type = "Task" [ 749.410096] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.419082] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e66334-ba78-1f28-5d00-87f5c5672627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.502592] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.547873] env[61648]: DEBUG nova.scheduler.client.report [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.923111] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e66334-ba78-1f28-5d00-87f5c5672627, 'name': SearchDatastore_Task, 'duration_secs': 0.011146} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.923410] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.923494] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.923732] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57c8d2d7-e84a-49c0-a975-294b05b24f1f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.933061] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 749.933061] env[61648]: value = "task-1336657" [ 749.933061] env[61648]: _type = "Task" [ 749.933061] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.940436] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336657, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.007288] env[61648]: DEBUG nova.network.neutron [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.055681] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.056154] env[61648]: ERROR nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Traceback (most recent call last): [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.driver.spawn(context, instance, image_meta, [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self._vmops.spawn(context, instance, image_meta, injected_files, [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] vm_ref = self.build_virtual_machine(instance, [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] vif_infos = vmwarevif.get_vif_info(self._session, [ 750.056154] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] for vif in network_info: [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return self._sync_wrapper(fn, *args, **kwargs) [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 
81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.wait() [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self[:] = self._gt.wait() [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return self._exit_event.wait() [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] current.throw(*self._exc) [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 750.056508] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] result = function(*args, **kwargs) [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] return func(*args, **kwargs) [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise e [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] nwinfo = self.network_api.allocate_for_instance( [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] created_port_ids = self._update_ports_for_instance( [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] with excutils.save_and_reraise_exception(): [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] self.force_reraise() [ 750.056862] env[61648]: ERROR nova.compute.manager [instance: 
81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise self.value [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] updated_port = self._update_port( [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] _ensure_no_port_binding_failure(port) [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] raise exception.PortBindingFailed(port_id=port['id']) [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] nova.exception.PortBindingFailed: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. [ 750.057314] env[61648]: ERROR nova.compute.manager [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] [ 750.057314] env[61648]: DEBUG nova.compute.utils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 750.059161] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.737s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.066027] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Build of instance 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297 was re-scheduled: Binding failed for port 034e1507-1467-4dbc-a6d7-51b217eb67b5, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 750.066027] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 750.066027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquiring lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.066027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Acquired lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.066287] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.441834] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336657, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.510823] env[61648]: INFO nova.compute.manager [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] [instance: 0a321a24-0f87-47e7-8364-5da5f6a65131] Took 1.20 seconds to deallocate network for instance. [ 750.591521] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.710758] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.873759] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b33109a-df25-4ffc-967e-1de7e06008fa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.881728] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c1c7fa-1d97-4ad8-8317-9b4359f65a37 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.912325] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c164c1-5a35-40f4-a457-99353e1ce077 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.919326] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f010a9f5-4033-42c6-8fce-702367250764 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.932131] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 750.941443] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534533} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.941443] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.941640] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.942587] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36d02bc3-ef7f-4b03-b56a-66d987ae81ca {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.949089] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 750.949089] env[61648]: value = "task-1336658" [ 750.949089] env[61648]: _type = "Task" [ 750.949089] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.956675] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.213958] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Releasing lock "refresh_cache-81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.214234] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 751.214424] env[61648]: DEBUG nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.214590] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.231096] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.462852] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071204} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.467138] env[61648]: ERROR nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [req-c23ef09b-0fd8-4073-bbc5-3c9a325764e8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c23ef09b-0fd8-4073-bbc5-3c9a325764e8"}]}: nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. 
[ 751.467138] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.468175] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e0d378-30dc-4aa4-b25a-06c40bde7682 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.487760] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.488897] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 751.490768] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33517696-f4ee-4aa8-b7ab-d1809c4fa48a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.511119] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 751.511119] env[61648]: value = "task-1336659" [ 751.511119] env[61648]: _type = "Task" [ 751.511119] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.521600] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336659, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.522078] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 751.522392] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 751.533695] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 751.549153] env[61648]: INFO nova.scheduler.client.report [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Deleted allocations for instance 0a321a24-0f87-47e7-8364-5da5f6a65131 [ 751.556782] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 751.735144] env[61648]: DEBUG nova.network.neutron [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.882490] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200f6ee8-4bff-476f-b4e1-1abd397905c0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.890904] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3b95ac02-a289-4f5f-9112-74288214970b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.921446] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7620493f-d6eb-4049-9931-eb3ca6a78392 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.928714] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28574645-e03c-40aa-a56c-0262c9ee46a0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.944024] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.021614] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336659, 'name': ReconfigVM_Task, 'duration_secs': 0.276979} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.021614] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Reconfigured VM instance instance-00000033 to attach disk [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.022068] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-825db65a-778a-47c2-876d-8b99e6a5080f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.028472] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 752.028472] env[61648]: value = "task-1336660" [ 752.028472] env[61648]: _type = "Task" [ 752.028472] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.035851] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336660, 'name': Rename_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.060729] env[61648]: DEBUG oslo_concurrency.lockutils [None req-af403bd2-c9d4-4d35-8c2d-250610a945dd tempest-SecurityGroupsTestJSON-2025830300 tempest-SecurityGroupsTestJSON-2025830300-project-member] Lock "0a321a24-0f87-47e7-8364-5da5f6a65131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 167.409s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.238901] env[61648]: INFO nova.compute.manager [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] [instance: 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297] Took 1.02 seconds to deallocate network for instance. [ 752.483963] env[61648]: DEBUG nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 752.484227] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 80 to 81 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 752.484402] env[61648]: DEBUG nova.compute.provider_tree [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.540081] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336660, 'name': Rename_Task, 'duration_secs': 0.125569} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.540081] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.540081] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f66a315-acad-4254-9de4-2af4d2206aa8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.547057] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 752.547057] env[61648]: value = "task-1336661" [ 752.547057] env[61648]: _type = "Task" [ 752.547057] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.554955] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.563488] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 752.989978] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.932s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.990665] env[61648]: ERROR nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. 
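The ERROR record above, together with the traceback records that follow, shows the build of instance 959931f5-eebc-4544-af88-ea231301b4a5 failing because Neutron could not bind port 21dfc9d8-7b33-46b1-a748-06f75cc65b71: _ensure_no_port_binding_failure turns a port whose binding came back failed into a PortBindingFailed exception, which aborts the spawn and triggers the re-schedule. A simplified, self-contained illustration of that check (not the Nova code itself; the binding_failed value is an assumption based on the traceback):

    # Simplified illustration of the check behind the PortBindingFailed above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # 'port' stands in for the dict Neutron returns for the updated port.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '21dfc9d8-7b33-46b1-a748-06f75cc65b71',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)   # Binding failed for port 21dfc9d8-..., please check ...
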
[ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Traceback (most recent call last): [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.driver.spawn(context, instance, image_meta, [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] vm_ref = self.build_virtual_machine(instance, [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.990665] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] for vif in network_info: [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self._sync_wrapper(fn, *args, **kwargs) [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.wait() [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self[:] = self._gt.wait() [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self._exit_event.wait() [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] result = hub.switch() [ 752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
752.994231] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return self.greenlet.switch() [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] result = function(*args, **kwargs) [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] return func(*args, **kwargs) [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise e [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] nwinfo = self.network_api.allocate_for_instance( [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] created_port_ids = self._update_ports_for_instance( [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] with excutils.save_and_reraise_exception(): [ 752.994875] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] self.force_reraise() [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise self.value [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] updated_port = self._update_port( [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] _ensure_no_port_binding_failure(port) [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] raise exception.PortBindingFailed(port_id=port['id']) [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] nova.exception.PortBindingFailed: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. [ 752.995274] env[61648]: ERROR nova.compute.manager [instance: 959931f5-eebc-4544-af88-ea231301b4a5] [ 752.995587] env[61648]: DEBUG nova.compute.utils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 752.995587] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.307s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.995587] env[61648]: INFO nova.compute.claims [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.998924] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Build of instance 959931f5-eebc-4544-af88-ea231301b4a5 was re-scheduled: Binding failed for port 21dfc9d8-7b33-46b1-a748-06f75cc65b71, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 753.003019] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 753.003019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.003019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.003019] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.062448] env[61648]: DEBUG oslo_vmware.api [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336661, 'name': PowerOnVM_Task, 'duration_secs': 0.43211} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.062743] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.062952] env[61648]: INFO nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Took 4.85 seconds to spawn the instance on the hypervisor. 
[ 753.063144] env[61648]: DEBUG nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 753.063962] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953eafb8-7d84-48a8-ac9c-f7dee3a49f8b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.088105] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.272661] env[61648]: INFO nova.scheduler.client.report [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Deleted allocations for instance 81e45c4b-ca97-4dfb-a6fe-2ada01d5c297 [ 753.532620] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.588816] env[61648]: INFO nova.compute.manager [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Took 28.26 seconds to build instance. 
[ 753.658529] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.782051] env[61648]: DEBUG oslo_concurrency.lockutils [None req-208c62ea-d21c-4808-bd72-ea3188862cc5 tempest-ServerMetadataTestJSON-1582907501 tempest-ServerMetadataTestJSON-1582907501-project-member] Lock "81e45c4b-ca97-4dfb-a6fe-2ada01d5c297" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 162.461s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.097729] env[61648]: DEBUG oslo_concurrency.lockutils [None req-50da13e6-cc6f-4b7f-a7ab-5cf9c2aa8b50 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "b9130bac-f92b-4208-b84c-852f4a269153" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.638s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.161311] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-959931f5-eebc-4544-af88-ea231301b4a5" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.161423] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 754.162105] env[61648]: DEBUG nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.162105] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 754.197984] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.234915] env[61648]: INFO nova.compute.manager [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Rebuilding instance [ 754.283875] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 754.299888] env[61648]: DEBUG nova.compute.manager [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 754.299888] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1192d9-c8c1-4094-9f9b-5c2ccd8bb93f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.350394] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e71fed-cff8-43a0-b092-96be6da56526 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.360521] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1881e34-d8e1-4a96-b6d9-21626aebf136 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.390308] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25ba795-393f-49f2-b6ec-2843687ca8f2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.399147] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316f1868-1e04-4e21-ab50-5a1c4629eefc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.413104] env[61648]: DEBUG nova.compute.provider_tree [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.600038] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 754.705561] env[61648]: DEBUG nova.network.neutron [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.818356] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.823332] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.826595] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b9ef512-1dd8-4cb8-99a6-b835a2af9fd9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.836095] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 754.836095] env[61648]: value = "task-1336662" [ 754.836095] env[61648]: _type = "Task" [ 754.836095] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.846090] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336662, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.917931] env[61648]: DEBUG nova.scheduler.client.report [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.125168] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.208644] env[61648]: INFO nova.compute.manager [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 959931f5-eebc-4544-af88-ea231301b4a5] Took 1.05 seconds to deallocate network for instance. [ 755.352015] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336662, 'name': PowerOffVM_Task, 'duration_secs': 0.11998} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.353578] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.354264] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.355157] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3cba45-9b0f-40f5-85a9-150fc0607da3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.365969] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.366393] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2045c131-16a7-4386-8b1e-b1d4492a9464 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.389992] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.390234] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.390550] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Deleting the datastore file [datastore2] b9130bac-f92b-4208-b84c-852f4a269153 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.390675] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57233803-2df3-4e91-a1ae-9f63cb626a4e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.397289] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 755.397289] env[61648]: value = "task-1336664" [ 755.397289] env[61648]: _type = "Task" [ 755.397289] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.404790] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.424343] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.424879] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 755.429909] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.789s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.431405] env[61648]: INFO nova.compute.claims [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.907054] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103255} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.907357] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.907622] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.908581] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.935779] env[61648]: DEBUG nova.compute.utils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.937338] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 755.937515] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.001462] env[61648]: DEBUG nova.policy [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07695904a1d6484e9890f7a83d0252fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abf6ecd1a0b94fa1b2e085bb6fdef2c2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 756.267639] env[61648]: INFO nova.scheduler.client.report [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Deleted allocations for instance 959931f5-eebc-4544-af88-ea231301b4a5 [ 756.443037] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Successfully created port: 1756d1ff-7e88-4633-b466-c45f9bcb0279 {{(pid=61648) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 756.446474] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 756.779408] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eacae05c-7268-4480-9ee1-969bbba944ea tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "959931f5-eebc-4544-af88-ea231301b4a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.125s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.848799] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662efc4a-2ba7-49f4-9e54-0ea0759ba797 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.856681] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf4ae48-87a0-436c-9892-a724438e0df2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.890949] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d927695f-54db-4026-b424-f6df6370d037 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.899097] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f3a7fd-9331-4e3c-9fbb-43db7f2eca6f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.912825] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.955079] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.955079] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.955079] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.955079] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.955513] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.955513] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.955591] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.955696] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.955856] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.956054] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.956198] env[61648]: DEBUG nova.virt.hardware [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee 
tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.961617] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6133c93b-b85e-4c89-a84c-160516a98930 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.970573] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecb4ec5-466b-4aab-ad73-f63b101bc198 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.989725] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.995772] env[61648]: DEBUG oslo.service.loopingcall [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.995772] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.995993] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34247ad5-1741-41c5-8a15-e7b0c6611185 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.016571] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.016571] env[61648]: value = "task-1336665" [ 757.016571] env[61648]: _type = "Task" [ 757.016571] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.024617] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336665, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.041206] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.041689] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.282252] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 757.444889] env[61648]: ERROR nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [req-eb6dc968-b30f-4795-b885-05265588ee11] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eb6dc968-b30f-4795-b885-05265588ee11"}]} [ 757.462834] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 757.466966] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 757.480567] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 757.480863] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.492329] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 757.497532] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.497667] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.497805] env[61648]: DEBUG nova.virt.hardware [None 
req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.497979] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.498152] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.498339] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.498536] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.499616] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.499616] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.499616] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.499616] env[61648]: DEBUG nova.virt.hardware [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.500407] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0f0a2e-36b3-4b08-a73b-f8e053ad1247 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.514273] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512ee915-a3aa-40f4-b894-b9641937dcb2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
757.521318] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 757.526281] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.526281] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.542017] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336665, 'name': CreateVM_Task, 'duration_secs': 0.277678} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.542017] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.542017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.542017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.542017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 757.542017] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dc65abe-1ed5-492d-bcaa-aa26fe925676 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.548822] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee 
tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 757.548822] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5235f603-10e5-ee79-58c6-e5a841b83126" [ 757.548822] env[61648]: _type = "Task" [ 757.548822] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.553744] env[61648]: DEBUG nova.compute.manager [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Received event network-changed-1756d1ff-7e88-4633-b466-c45f9bcb0279 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.553921] env[61648]: DEBUG nova.compute.manager [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Refreshing instance network info cache due to event network-changed-1756d1ff-7e88-4633-b466-c45f9bcb0279. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 757.554141] env[61648]: DEBUG oslo_concurrency.lockutils [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] Acquiring lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.554279] env[61648]: DEBUG oslo_concurrency.lockutils [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] Acquired lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.554435] env[61648]: DEBUG nova.network.neutron [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Refreshing network info cache for port 1756d1ff-7e88-4633-b466-c45f9bcb0279 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.560407] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5235f603-10e5-ee79-58c6-e5a841b83126, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.719737] env[61648]: ERROR nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. 
[ 757.719737] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.719737] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.719737] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.719737] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.719737] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.719737] env[61648]: ERROR nova.compute.manager raise self.value [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.719737] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 757.719737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.719737] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 757.720235] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.720235] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 757.720235] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. 
[ 757.720235] env[61648]: ERROR nova.compute.manager [ 757.720235] env[61648]: Traceback (most recent call last): [ 757.720235] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 757.720235] env[61648]: listener.cb(fileno) [ 757.720235] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.720235] env[61648]: result = function(*args, **kwargs) [ 757.720235] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.720235] env[61648]: return func(*args, **kwargs) [ 757.720235] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.720235] env[61648]: raise e [ 757.720235] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.720235] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 757.720235] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.720235] env[61648]: created_port_ids = self._update_ports_for_instance( [ 757.720235] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.720235] env[61648]: with excutils.save_and_reraise_exception(): [ 757.720235] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.720235] env[61648]: self.force_reraise() [ 757.720235] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.720235] env[61648]: raise self.value [ 757.720235] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.720235] env[61648]: updated_port = self._update_port( [ 757.720235] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.720235] env[61648]: _ensure_no_port_binding_failure(port) [ 757.720235] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.720235] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 757.721042] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. [ 757.721042] env[61648]: Removing descriptor: 19 [ 757.721042] env[61648]: ERROR nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. 
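Each of these tracebacks bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279. A simplified sketch of that kind of check, assuming the usual Neutron convention that a failed binding is reported as binding:vif_type == "binding_failed" (the exception class below is a local stand-in, not nova.exception.PortBindingFailed):

VIF_TYPE_BINDING_FAILED = "binding_failed"  # Neutron's marker for a failed binding

class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed.

    port is a dict shaped like a Neutron port (it must contain 'id' and,
    when binding failed, 'binding:vif_type').
    """
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])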
[ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Traceback (most recent call last): [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] yield resources [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.driver.spawn(context, instance, image_meta, [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.721042] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] vm_ref = self.build_virtual_machine(instance, [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] for vif in network_info: [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self._sync_wrapper(fn, *args, **kwargs) [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.wait() [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self[:] = self._gt.wait() [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self._exit_event.wait() [ 757.721418] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.721861] env[61648]: ERROR 
nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] result = hub.switch() [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self.greenlet.switch() [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] result = function(*args, **kwargs) [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return func(*args, **kwargs) [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise e [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] nwinfo = self.network_api.allocate_for_instance( [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.721861] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] created_port_ids = self._update_ports_for_instance( [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] with excutils.save_and_reraise_exception(): [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.force_reraise() [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise self.value [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] updated_port = self._update_port( [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.722291] 
env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] _ensure_no_port_binding_failure(port) [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.722291] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise exception.PortBindingFailed(port_id=port['id']) [ 757.722686] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. [ 757.722686] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] [ 757.722686] env[61648]: INFO nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Terminating instance [ 757.723501] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.809198] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.826397] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa2f9ba-7504-445e-a361-de24945a7a3a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.835274] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a640f0-b378-4fc9-b89b-37b5887a5628 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.865827] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99eb4ddc-804c-4d91-aff1-880ba70a165a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.874410] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a75ae31-aaa0-4524-9aab-6c65e37e3773 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.892946] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.979375] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.979709] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.062723] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5235f603-10e5-ee79-58c6-e5a841b83126, 'name': SearchDatastore_Task, 'duration_secs': 0.012718} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.063195] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.063502] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 758.063840] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.064089] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.064394] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.065169] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb8a41e6-97a7-4943-8ad1-7e4c9f2b3257 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.074358] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.074507] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 758.075353] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-863a87e8-a903-47dc-b3a0-9ecc22e87ef1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.083171] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 758.083171] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52358e00-8912-9a76-cebc-1370d6d3e5ca" [ 758.083171] env[61648]: _type = "Task" [ 758.083171] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.088630] env[61648]: DEBUG nova.network.neutron [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.095706] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52358e00-8912-9a76-cebc-1370d6d3e5ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.206858] env[61648]: DEBUG nova.network.neutron [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.334677] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "db35b417-bcdb-4380-927a-f755e6421624" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.334920] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "db35b417-bcdb-4380-927a-f755e6421624" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.519868] env[61648]: ERROR nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [req-94f9f6ca-67ce-4a86-a453-b6d5a4945b8b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-94f9f6ca-67ce-4a86-a453-b6d5a4945b8b"}]} [ 758.537480] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 758.551538] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 758.551768] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 758.563261] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 758.578667] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 758.593128] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52358e00-8912-9a76-cebc-1370d6d3e5ca, 'name': SearchDatastore_Task, 
'duration_secs': 0.010012} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.596385] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27eed0f0-c5c3-45a1-8ec2-6ca75f1f0978 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.601566] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 758.601566] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]522087c6-3234-004f-c1d8-79cbf73392e9" [ 758.601566] env[61648]: _type = "Task" [ 758.601566] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.608892] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]522087c6-3234-004f-c1d8-79cbf73392e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.709109] env[61648]: DEBUG oslo_concurrency.lockutils [req-87c5d613-5a57-43f3-b330-29ad3d4c191c req-1edcbd08-2321-4917-a40c-15d2666f3c58 service nova] Releasing lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.709520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquired lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.709766] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.814478] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54bb53b-46df-47be-bddd-6d8fd212c4a7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.822493] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81edc17-36b7-4a6f-8e88-4789076511cf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.854599] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795645d1-4150-472a-aea5-63a68bc46fb7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.860945] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c6a5ff-f1ce-4552-8a58-254c4ca75b95 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
758.874190] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 759.088772] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "aa8fb674-60e3-431c-b8c3-9cc548965e18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.089059] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "aa8fb674-60e3-431c-b8c3-9cc548965e18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.111357] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]522087c6-3234-004f-c1d8-79cbf73392e9, 'name': SearchDatastore_Task, 'duration_secs': 0.010026} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.111604] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.111843] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.112114] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc788532-76f2-478f-98dd-7793ecf47ea8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.118355] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 759.118355] env[61648]: value = "task-1336666" [ 759.118355] env[61648]: _type = "Task" [ 759.118355] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.125974] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.238906] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.344573] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.395676] env[61648]: ERROR nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [req-76f2b5e0-875b-42ae-8448-26bf227efc8e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-76f2b5e0-875b-42ae-8448-26bf227efc8e"}]} [ 759.419850] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 759.436323] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 759.436593] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 759.450113] env[61648]: DEBUG nova.scheduler.client.report [None 
req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 759.474651] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 759.604886] env[61648]: DEBUG nova.compute.manager [req-7040289a-5faf-42d4-9402-3bdb108430b6 req-3ab737bd-81d9-46b5-84db-9a6c85eff9d8 service nova] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Received event network-vif-deleted-1756d1ff-7e88-4633-b466-c45f9bcb0279 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.635391] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503106} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.635634] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.635908] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.636286] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d38c620-533e-4104-a2e3-1feb7f8e1cc7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.644682] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 759.644682] env[61648]: value = "task-1336667" [ 759.644682] env[61648]: _type = "Task" [ 759.644682] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.684662] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336667, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.822987] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6525f30-5580-4987-82a1-17f4b73002f4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.832415] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de965bf1-4d04-4b65-b842-43b2db79c963 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.861431] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Releasing lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.861922] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.862230] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.863076] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00563eeb-437d-437f-bf34-ebb83c1be407 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.865461] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb40c2d7-316a-4a71-9b62-05d6a17e78ab {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.873958] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d04e58-cc69-45b4-b87f-d3aab21c7504 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.880885] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aca82d-377c-4ca0-8667-3ea297a42489 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.900895] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.908843] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance d76d8aed-9126-4d21-9df9-6317c3b19f65 could not be found. [ 759.908843] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.908843] env[61648]: INFO nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Took 0.05 seconds to destroy the instance on the hypervisor. [ 759.908843] env[61648]: DEBUG oslo.service.loopingcall [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.908843] env[61648]: DEBUG nova.compute.manager [-] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.909096] env[61648]: DEBUG nova.network.neutron [-] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.928506] env[61648]: DEBUG nova.network.neutron [-] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.156894] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065435} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.156894] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.156894] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac5af11-fec3-4d2c-aa84-e82ecb375e1f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.177413] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.177709] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9030e86-25a5-4b2c-871a-ddc41ce1036d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.197699] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 760.197699] env[61648]: value = "task-1336668" [ 760.197699] env[61648]: _type = "Task" [ 760.197699] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.205350] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336668, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.405022] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.430816] env[61648]: DEBUG nova.network.neutron [-] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.711017] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336668, 'name': ReconfigVM_Task, 'duration_secs': 0.276128} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.711017] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Reconfigured VM instance instance-00000033 to attach disk [datastore2] b9130bac-f92b-4208-b84c-852f4a269153/b9130bac-f92b-4208-b84c-852f4a269153.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.711017] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46f9f7fb-c8df-4082-9ecf-5e57d49625b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.715018] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 760.715018] env[61648]: value = "task-1336669" [ 760.715018] env[61648]: _type = "Task" [ 760.715018] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.722884] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336669, 'name': Rename_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.910187] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.480s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.910722] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 760.913266] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.053s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.932603] env[61648]: INFO nova.compute.manager [-] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Took 1.02 seconds to deallocate network for instance. [ 760.935195] env[61648]: DEBUG nova.compute.claims [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.935375] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.231602] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336669, 'name': Rename_Task, 'duration_secs': 0.268354} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.231918] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.232217] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf9620a7-ce19-41c5-b672-f225bbb1b0de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.241403] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 761.241403] env[61648]: value = "task-1336670" [ 761.241403] env[61648]: _type = "Task" [ 761.241403] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.249546] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.416319] env[61648]: DEBUG nova.compute.utils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.429464] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.429464] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.466292] env[61648]: DEBUG nova.policy [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a08aab0e4e646d6986c4eb433c7a8a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a14d8ea4094d97915d0bfbf344201d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 761.753363] env[61648]: DEBUG oslo_vmware.api [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336670, 'name': PowerOnVM_Task, 'duration_secs': 0.407314} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.753631] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.753831] env[61648]: DEBUG nova.compute.manager [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 761.754626] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0dc620-44cb-49ab-801d-10623360addf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.769264] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Successfully created port: 2b3a0b89-5f13-4fa2-8319-e164abbb9e27 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.934236] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 761.954802] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 831da774-5e37-4d49-a1fd-3eb421c7fcb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.954963] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 14854fd0-680a-48a2-b1d6-50e75624aef2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.955099] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b9130bac-f92b-4208-b84c-852f4a269153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.955215] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d76d8aed-9126-4d21-9df9-6317c3b19f65 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 761.955324] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance bb9f6dc4-fd06-4cb5-984f-c938ed901772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 762.278981] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.463968] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 762.548194] env[61648]: DEBUG nova.compute.manager [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Received event network-changed-2b3a0b89-5f13-4fa2-8319-e164abbb9e27 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 762.548425] env[61648]: DEBUG nova.compute.manager [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Refreshing instance network info cache due to event network-changed-2b3a0b89-5f13-4fa2-8319-e164abbb9e27. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 762.548614] env[61648]: DEBUG oslo_concurrency.lockutils [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] Acquiring lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.548748] env[61648]: DEBUG oslo_concurrency.lockutils [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] Acquired lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.549139] env[61648]: DEBUG nova.network.neutron [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Refreshing network info cache for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.852531] env[61648]: ERROR nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. 
[ 762.852531] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 762.852531] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.852531] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.852531] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.852531] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.852531] env[61648]: ERROR nova.compute.manager raise self.value [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.852531] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 762.852531] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.852531] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 762.852932] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.852932] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 762.852932] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. 
[ 762.852932] env[61648]: ERROR nova.compute.manager [ 762.852932] env[61648]: Traceback (most recent call last): [ 762.852932] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 762.852932] env[61648]: listener.cb(fileno) [ 762.852932] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 762.852932] env[61648]: result = function(*args, **kwargs) [ 762.852932] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 762.852932] env[61648]: return func(*args, **kwargs) [ 762.852932] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 762.852932] env[61648]: raise e [ 762.852932] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 762.852932] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 762.852932] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.852932] env[61648]: created_port_ids = self._update_ports_for_instance( [ 762.852932] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.852932] env[61648]: with excutils.save_and_reraise_exception(): [ 762.852932] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.852932] env[61648]: self.force_reraise() [ 762.852932] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.852932] env[61648]: raise self.value [ 762.852932] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.852932] env[61648]: updated_port = self._update_port( [ 762.852932] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.852932] env[61648]: _ensure_no_port_binding_failure(port) [ 762.852932] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.852932] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 762.853663] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. [ 762.853663] env[61648]: Removing descriptor: 19 [ 762.947517] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 762.968194] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance ffb6b3e0-5602-4c28-958d-22265337e236 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.006530] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.006765] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.006920] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.007153] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.007301] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.007471] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.007689] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.007845] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 
tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.008015] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.008598] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.008908] env[61648]: DEBUG nova.virt.hardware [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.010049] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c863a72-caab-448a-b836-d1297ec520a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.017996] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd156275-5901-45a8-af2a-bace4d180f52 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.033074] env[61648]: ERROR nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. 
[ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Traceback (most recent call last): [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] yield resources [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.driver.spawn(context, instance, image_meta, [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] vm_ref = self.build_virtual_machine(instance, [ 763.033074] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] for vif in network_info: [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return self._sync_wrapper(fn, *args, **kwargs) [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.wait() [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self[:] = self._gt.wait() [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return self._exit_event.wait() [ 763.033424] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 763.033424] env[61648]: ERROR 
nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] current.throw(*self._exc) [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] result = function(*args, **kwargs) [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return func(*args, **kwargs) [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise e [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] nwinfo = self.network_api.allocate_for_instance( [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] created_port_ids = self._update_ports_for_instance( [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] with excutils.save_and_reraise_exception(): [ 763.033733] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.force_reraise() [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise self.value [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] updated_port = self._update_port( [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] _ensure_no_port_binding_failure(port) [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise exception.PortBindingFailed(port_id=port['id']) [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. [ 763.034025] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] [ 763.034025] env[61648]: INFO nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Terminating instance [ 763.035386] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquiring lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.069258] env[61648]: DEBUG nova.network.neutron [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.124811] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "b9130bac-f92b-4208-b84c-852f4a269153" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.126098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "b9130bac-f92b-4208-b84c-852f4a269153" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.126098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "b9130bac-f92b-4208-b84c-852f4a269153-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.126098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "b9130bac-f92b-4208-b84c-852f4a269153-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.126098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock 
"b9130bac-f92b-4208-b84c-852f4a269153-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.127998] env[61648]: INFO nova.compute.manager [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Terminating instance [ 763.130020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "refresh_cache-b9130bac-f92b-4208-b84c-852f4a269153" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.130187] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquired lock "refresh_cache-b9130bac-f92b-4208-b84c-852f4a269153" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.130349] env[61648]: DEBUG nova.network.neutron [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.217180] env[61648]: DEBUG nova.network.neutron [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.479532] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 6d9ab9ac-d892-47e7-9b86-a2dce40a4568 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 763.659678] env[61648]: DEBUG nova.network.neutron [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.720424] env[61648]: DEBUG oslo_concurrency.lockutils [req-57cb1c98-864c-4be5-8a1b-84988cbbbad5 req-c4f4e613-2e74-4385-bc8b-5f42a04af9e9 service nova] Releasing lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.721215] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquired lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.721418] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.765562] env[61648]: DEBUG nova.network.neutron [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.982497] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 26534fec-8cf8-4cdd-a91f-e63afabc0d57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.246599] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.274191] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Releasing lock "refresh_cache-b9130bac-f92b-4208-b84c-852f4a269153" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.274650] env[61648]: DEBUG nova.compute.manager [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 764.274842] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.275730] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ff11a0-6411-4f27-8c38-65db9d2f70bc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.283566] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.283691] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1067d28-76a8-4785-bbed-3ae7cf0f70a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.289708] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 764.289708] env[61648]: value = "task-1336671" [ 764.289708] env[61648]: _type = "Task" [ 764.289708] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.305687] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.353027] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.485768] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d4b580e9-aae2-4c14-abd8-c6a08f0a576c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.686414] env[61648]: DEBUG nova.compute.manager [req-99847a96-c608-4b4c-94a0-6fd615f8e7f5 req-d5e55766-7f60-4d8d-a67a-2b624de0d70d service nova] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Received event network-vif-deleted-2b3a0b89-5f13-4fa2-8319-e164abbb9e27 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 764.801847] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336671, 'name': PowerOffVM_Task, 'duration_secs': 0.131994} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.802392] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.802632] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.802944] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f167394c-3613-49ed-bbf2-499977329c26 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.830445] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.830729] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.830952] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Deleting the datastore file [datastore2] b9130bac-f92b-4208-b84c-852f4a269153 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.831236] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aba8fde7-7c0e-484b-b1d8-9b1b055af261 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.838069] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for the task: (returnval){ [ 764.838069] env[61648]: value = "task-1336673" [ 764.838069] env[61648]: _type = "Task" [ 764.838069] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.845907] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.855488] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Releasing lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.855893] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 764.856096] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.856352] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01f74618-c9f2-4ed0-b72e-90a0df51ed40 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.864387] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fcb0b3-986b-411a-88c4-d897ece58936 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.885421] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb9f6dc4-fd06-4cb5-984f-c938ed901772 could not be found. [ 764.885647] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.885831] env[61648]: INFO nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Took 0.03 seconds to destroy the instance on the hypervisor. 
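Editor's note: the entries above show the destroy path tolerating a VM that was never created on the backend (the build aborted at PortBindingFailed before spawn registered anything with vCenter), logging a warning and then proceeding as if the instance were already destroyed. Below is a minimal, illustrative sketch of that tolerance pattern only; the exception class and lookup helper are hypothetical stand-ins, not Nova's vmops implementation.

```python
# Illustrative sketch only (not Nova code): a destroy path that tolerates a VM
# missing on the backend, mirroring the WARNING -> "Instance destroyed" pair above.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("sketch.vmops")


class InstanceNotFound(Exception):
    """Hypothetical stand-in for an 'instance not found on backend' error."""


def _lookup_vm(backend, uuid):
    """Hypothetical backend lookup; raises if the VM was never created."""
    if uuid not in backend:
        raise InstanceNotFound(f"Instance {uuid} could not be found.")
    return backend[uuid]


def destroy_instance(backend, uuid):
    try:
        vm_ref = _lookup_vm(backend, uuid)
    except InstanceNotFound as exc:
        # The build aborted before spawn registered a VM, so there is
        # nothing to power off, unregister, or delete from the datastore.
        LOG.warning("Instance does not exist on backend: %s", exc)
    else:
        LOG.debug("Powering off and unregistering %s", vm_ref)
    LOG.debug("Instance destroyed")


# Usage: the failed-build case from the log, where the backend has no VM.
destroy_instance(backend={}, uuid="bb9f6dc4-fd06-4cb5-984f-c938ed901772")
```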
[ 764.886118] env[61648]: DEBUG oslo.service.loopingcall [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.886346] env[61648]: DEBUG nova.compute.manager [-] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 764.886440] env[61648]: DEBUG nova.network.neutron [-] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.906519] env[61648]: DEBUG nova.network.neutron [-] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.989081] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 423b5f66-624b-49fe-9f65-9bd3318917c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.348438] env[61648]: DEBUG oslo_vmware.api [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Task: {'id': task-1336673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086159} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.348801] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.349050] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.349274] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.349488] env[61648]: INFO nova.compute.manager [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Took 1.07 seconds to destroy the instance on the hypervisor. 
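Editor's note: the PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above are asynchronous vCenter tasks that the driver submits and then polls until they report completion ("Waiting for the task ... progress is 0% ... completed successfully"). The sketch below illustrates that poll-until-done pattern in isolation; the task-info shape and helper names are assumptions for the sketch, not the oslo.vmware API.

```python
# Illustrative sketch only: the poll-until-done pattern behind the
# "Waiting for the task ... progress is 0% ... completed successfully"
# entries above. This simulates a task with a stub; it is not oslo.vmware code.
import time


class TaskTimeout(Exception):
    pass


def wait_for_task(poll_fn, interval=0.5, timeout=30.0):
    """Poll `poll_fn` until it reports success, error, or timeout.

    `poll_fn` returns a dict like {'state': 'running'|'success'|'error',
    'progress': int}; that shape is an assumption made for this sketch.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = poll_fn()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        if time.monotonic() > deadline:
            raise TaskTimeout("task did not complete in time")
        time.sleep(interval)


# Usage: a fake task that finishes on the third poll.
_polls = iter([
    {"state": "running", "progress": 0},
    {"state": "running", "progress": 50},
    {"state": "success", "progress": 100},
])
print(wait_for_task(lambda: next(_polls), interval=0.01))
```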
[ 765.349806] env[61648]: DEBUG oslo.service.loopingcall [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.350062] env[61648]: DEBUG nova.compute.manager [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.350196] env[61648]: DEBUG nova.network.neutron [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.368790] env[61648]: DEBUG nova.network.neutron [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.409398] env[61648]: DEBUG nova.network.neutron [-] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.492523] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 3ca295b7-50e2-4b6b-8033-991328a43f3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.871540] env[61648]: DEBUG nova.network.neutron [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.911656] env[61648]: INFO nova.compute.manager [-] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Took 1.03 seconds to deallocate network for instance. [ 765.913822] env[61648]: DEBUG nova.compute.claims [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 765.914000] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.995812] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance af5e6f7b-7c21-44d1-a05c-0d34f59c0065 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.375344] env[61648]: INFO nova.compute.manager [-] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Took 1.02 seconds to deallocate network for instance. [ 766.498953] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance c633de1e-3dfb-4304-ac9f-d8f4a2b725d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.880981] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.002278] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 9cc301f6-45de-43b9-a88d-d94e3f00cff3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.509856] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance acbab424-c325-4e57-81a2-3d4a1ae4a081 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.013070] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e199f125-9259-4268-9aaf-1f4d10da9a34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.520213] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance c399cd8d-6cad-43d8-9226-36f9d9c247e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.024010] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 32a2c7ce-2980-4eac-ad52-b8d5d67d669b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.533675] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance acc5b6cb-16ee-4756-9088-fa094eb83daa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.039342] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.542914] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance db35b417-bcdb-4380-927a-f755e6421624 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.048021] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance aa8fb674-60e3-431c-b8c3-9cc548965e18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.048021] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 771.048180] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 771.326127] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6068de3c-e866-4f7d-9ed2-be2ab4e9b612 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.333685] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e17ac8-d4c3-401b-aec5-eebf45469753 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.078080] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc003e1-7d3e-4f0a-98b7-2165366a7363 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.086197] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82eb46a-4085-4c8d-9e71-9c4522ddd038 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.099360] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.643524] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 772.643748] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 86 to 87 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 
772.643898] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 773.148367] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 773.148716] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.235s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.148905] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.463s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.155045] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.155045] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Cleaning up deleted instances {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 773.662430] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] There are 3 instances to clean {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 773.662694] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 55630bdb-fe38-49dc-baa2-2ac5de20e569] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 774.044523] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94aa879-cb68-4296-a27b-0e461fda19f1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.052239] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635df654-5e6c-40c8-ac5b-a4f8248e71f9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.082864] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fa06b9-b873-401d-9af6-251d3349b60a {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.090016] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e5d20b-7ce6-404a-973a-8a09b663dc80 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.103090] env[61648]: DEBUG nova.compute.provider_tree [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.167611] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e918b827-ea37-4589-8999-e363aba4492d] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 774.606235] env[61648]: DEBUG nova.scheduler.client.report [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.672133] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 767c2c81-2508-4dcd-97d7-28726c2c6d31] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 775.111684] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.963s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.112303] env[61648]: ERROR nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. 
[ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Traceback (most recent call last): [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.driver.spawn(context, instance, image_meta, [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] vm_ref = self.build_virtual_machine(instance, [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 775.112303] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] for vif in network_info: [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return self._sync_wrapper(fn, *args, **kwargs) [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.wait() [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self[:] = self._gt.wait() [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return self._exit_event.wait() [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] current.throw(*self._exc) [ 775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
775.112607] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] result = function(*args, **kwargs) [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] return func(*args, **kwargs) [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise e [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] nwinfo = self.network_api.allocate_for_instance( [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] created_port_ids = self._update_ports_for_instance( [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] with excutils.save_and_reraise_exception(): [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] self.force_reraise() [ 775.112937] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise self.value [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] updated_port = self._update_port( [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] _ensure_no_port_binding_failure(port) [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] raise exception.PortBindingFailed(port_id=port['id']) [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] nova.exception.PortBindingFailed: Binding failed for 
port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. [ 775.113254] env[61648]: ERROR nova.compute.manager [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] [ 775.113254] env[61648]: DEBUG nova.compute.utils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 775.114706] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.665s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.117628] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Build of instance 831da774-5e37-4d49-a1fd-3eb421c7fcb7 was re-scheduled: Binding failed for port d5cf109b-2ef4-4737-8883-e77a128d2dcb, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 775.118134] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 775.118360] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.118502] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.118658] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.178993] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.179287] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Cleaning up deleted instances with incomplete migration 
{{(pid=61648) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 775.652800] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.682008] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.765084] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.887891] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730676fd-2215-487e-822d-c167f94510ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.895229] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9438ec46-a705-4c31-91f4-c80c3f7156d6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.925744] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf715b1-36e3-4126-a477-5e4ed7f6218f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.932911] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc01adac-6dd1-4658-b59c-d0bfee149269 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.945208] env[61648]: DEBUG nova.compute.provider_tree [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.271532] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-831da774-5e37-4d49-a1fd-3eb421c7fcb7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.271532] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 776.271532] env[61648]: DEBUG nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.271532] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.285439] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.451198] env[61648]: DEBUG nova.scheduler.client.report [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.789031] env[61648]: DEBUG nova.network.neutron [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.956021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.839s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.956021] env[61648]: ERROR nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. 
[ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Traceback (most recent call last): [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.driver.spawn(context, instance, image_meta, [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.956021] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] vm_ref = self.build_virtual_machine(instance, [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] for vif in network_info: [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self._sync_wrapper(fn, *args, **kwargs) [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.wait() [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self[:] = self._gt.wait() [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self._exit_event.wait() [ 776.956313] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] result = hub.switch() [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return self.greenlet.switch() [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] result = function(*args, **kwargs) [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] return func(*args, **kwargs) [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise e [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] nwinfo = self.network_api.allocate_for_instance( [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.956583] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] created_port_ids = self._update_ports_for_instance( [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] with excutils.save_and_reraise_exception(): [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] self.force_reraise() [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise self.value [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] updated_port = self._update_port( [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] _ensure_no_port_binding_failure(port) [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 776.956844] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] raise exception.PortBindingFailed(port_id=port['id']) [ 776.957109] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] nova.exception.PortBindingFailed: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. [ 776.957109] env[61648]: ERROR nova.compute.manager [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] [ 776.957109] env[61648]: DEBUG nova.compute.utils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 776.961020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.452s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.961020] env[61648]: INFO nova.compute.claims [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.961989] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Build of instance 14854fd0-680a-48a2-b1d6-50e75624aef2 was re-scheduled: Binding failed for port a6bd2153-910b-4f71-acb0-1f550dc324b8, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 776.962587] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 776.962927] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.963224] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquired lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.963500] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.293482] env[61648]: INFO nova.compute.manager [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 831da774-5e37-4d49-a1fd-3eb421c7fcb7] Took 1.02 seconds to deallocate network for instance. [ 777.485541] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.564029] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.068048] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Releasing lock "refresh_cache-14854fd0-680a-48a2-b1d6-50e75624aef2" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.068048] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 778.068048] env[61648]: DEBUG nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 778.068048] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 778.083356] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.255551] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d363a877-34d0-448c-bc2b-3bc0fe592c68 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.263412] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e93ddcb-6a43-44a0-8c43-1fef21c3f123 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.291912] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbec06c-4a10-4dc0-9d17-40a0cbf44e5f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.298433] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266c32e5-018d-42bf-a6e4-7a86d251ca19 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.313587] env[61648]: DEBUG nova.compute.provider_tree [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.330059] env[61648]: INFO nova.scheduler.client.report [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance 831da774-5e37-4d49-a1fd-3eb421c7fcb7 [ 778.586579] env[61648]: DEBUG nova.network.neutron [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.816029] env[61648]: DEBUG nova.scheduler.client.report [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory 
has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.837562] env[61648]: DEBUG oslo_concurrency.lockutils [None req-73767ff2-a5f4-4684-be2c-58f6fce37f11 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "831da774-5e37-4d49-a1fd-3eb421c7fcb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 174.627s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.091655] env[61648]: INFO nova.compute.manager [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: 14854fd0-680a-48a2-b1d6-50e75624aef2] Took 1.02 seconds to deallocate network for instance. [ 779.320888] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.321443] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 779.324548] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.891s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.326119] env[61648]: INFO nova.compute.claims [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.340140] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 779.828019] env[61648]: DEBUG nova.compute.utils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.828019] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 779.828019] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.860020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.877798] env[61648]: DEBUG nova.policy [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e0f547b01e94904800d532194a53be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f61b9de98314d118496a0b69e59a6e9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 780.129039] env[61648]: INFO nova.scheduler.client.report [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Deleted allocations for instance 14854fd0-680a-48a2-b1d6-50e75624aef2 [ 780.280129] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Successfully created port: dc2bb52d-bd0e-4697-8740-bc2adfa0c416 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.331045] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 780.646409] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ac4f4cbd-778b-4aaa-8521-f4fd349b3cd0 tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "14854fd0-680a-48a2-b1d6-50e75624aef2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 168.382s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.680990] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Successfully created port: e65a48a4-5a2f-4e64-a43a-b049fbed8e5b {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.697096] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5931d211-460e-4480-97c1-3c36ab00720e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.705352] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb02a72c-189c-47bb-a62d-30ca3782b570 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.741064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15dd6525-5ac8-461d-a6c0-c7737e735a6f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.749959] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5a1254-45a6-4ebc-8da5-5a9929744ead {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.763552] env[61648]: DEBUG nova.compute.provider_tree [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.156275] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 781.256754] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.257027] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.270906] env[61648]: DEBUG nova.scheduler.client.report [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.341083] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 781.372576] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 781.372811] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 781.372967] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.373162] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 781.373307] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.373452] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 781.373659] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 781.373827] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 781.374016] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db 
tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 781.374231] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 781.374407] env[61648]: DEBUG nova.virt.hardware [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 781.375972] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc53a77-4684-4551-a62c-b0deb451489c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.383806] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c4c477-f6b6-44de-b8a9-401906c2567b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.541863] env[61648]: DEBUG nova.compute.manager [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Received event network-changed-dc2bb52d-bd0e-4697-8740-bc2adfa0c416 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 781.543653] env[61648]: DEBUG nova.compute.manager [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Refreshing instance network info cache due to event network-changed-dc2bb52d-bd0e-4697-8740-bc2adfa0c416. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 781.543653] env[61648]: DEBUG oslo_concurrency.lockutils [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] Acquiring lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.543653] env[61648]: DEBUG oslo_concurrency.lockutils [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] Acquired lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.543653] env[61648]: DEBUG nova.network.neutron [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Refreshing network info cache for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.683134] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.776599] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.777144] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 781.779786] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.692s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.781161] env[61648]: INFO nova.compute.claims [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.812188] env[61648]: ERROR nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. 
[ 781.812188] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.812188] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.812188] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.812188] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.812188] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.812188] env[61648]: ERROR nova.compute.manager raise self.value [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.812188] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 781.812188] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.812188] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 781.812622] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.812622] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 781.812622] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. 
[ 781.812622] env[61648]: ERROR nova.compute.manager [ 781.812622] env[61648]: Traceback (most recent call last): [ 781.812622] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 781.812622] env[61648]: listener.cb(fileno) [ 781.812622] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.812622] env[61648]: result = function(*args, **kwargs) [ 781.812622] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.812622] env[61648]: return func(*args, **kwargs) [ 781.812622] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.812622] env[61648]: raise e [ 781.812622] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.812622] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 781.812622] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.812622] env[61648]: created_port_ids = self._update_ports_for_instance( [ 781.812622] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.812622] env[61648]: with excutils.save_and_reraise_exception(): [ 781.812622] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.812622] env[61648]: self.force_reraise() [ 781.812622] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.812622] env[61648]: raise self.value [ 781.812622] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.812622] env[61648]: updated_port = self._update_port( [ 781.812622] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.812622] env[61648]: _ensure_no_port_binding_failure(port) [ 781.812622] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.812622] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 781.813283] env[61648]: nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. [ 781.813283] env[61648]: Removing descriptor: 19 [ 781.813564] env[61648]: ERROR nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. 
[ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Traceback (most recent call last): [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] yield resources [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.driver.spawn(context, instance, image_meta, [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] vm_ref = self.build_virtual_machine(instance, [ 781.813564] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] for vif in network_info: [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self._sync_wrapper(fn, *args, **kwargs) [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.wait() [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self[:] = self._gt.wait() [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self._exit_event.wait() [ 781.813881] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.813881] env[61648]: ERROR 
nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] result = hub.switch() [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self.greenlet.switch() [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] result = function(*args, **kwargs) [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return func(*args, **kwargs) [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise e [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] nwinfo = self.network_api.allocate_for_instance( [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] created_port_ids = self._update_ports_for_instance( [ 781.814292] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] with excutils.save_and_reraise_exception(): [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.force_reraise() [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise self.value [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] updated_port = self._update_port( [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.814709] 
env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] _ensure_no_port_binding_failure(port) [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise exception.PortBindingFailed(port_id=port['id']) [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. [ 781.814709] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] [ 781.815046] env[61648]: INFO nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Terminating instance [ 781.816425] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.100978] env[61648]: DEBUG nova.network.neutron [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.253464] env[61648]: DEBUG nova.network.neutron [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.288993] env[61648]: DEBUG nova.compute.utils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.292214] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 782.292370] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.359909] env[61648]: DEBUG nova.policy [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd0ea28c570a4ec0b2d80610d1058ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c462519f94804f51a1cef38c1085b47d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 782.677142] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Successfully created port: d98545f7-8d23-473e-8881-42e0cb6de758 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.757642] env[61648]: DEBUG oslo_concurrency.lockutils [req-f331ff29-0e35-4039-ac50-4398cf9ae690 req-63033d19-a7fa-4bc3-831e-754028fc57e6 service nova] Releasing lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.758107] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquired lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.758308] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.792658] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 783.103733] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63366bd1-d9dd-4ba0-8a43-57eb7093b883 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.114321] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993883a0-95ac-4f8c-b22c-6ca13cd31a82 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.150958] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c623050c-8cea-43be-a8c8-0d1eebc7760a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.162023] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7818cb-85fb-49dc-a2c5-93936f0c5432 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.176874] env[61648]: DEBUG nova.compute.provider_tree [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.332184] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.427643] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "b3ae23d8-e28e-460c-b9a3-4744f81f39ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.427643] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "b3ae23d8-e28e-460c-b9a3-4744f81f39ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.467477] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.567619] env[61648]: DEBUG nova.compute.manager [req-59056443-15de-4980-b91b-1b403d672f33 req-8292e286-4e0f-4051-8a07-9eab66ab3fbe service nova] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Received event network-vif-deleted-dc2bb52d-bd0e-4697-8740-bc2adfa0c416 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.681099] env[61648]: DEBUG nova.scheduler.client.report [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.801184] env[61648]: ERROR nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. 
[ 783.801184] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.801184] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 783.801184] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 783.801184] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.801184] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.801184] env[61648]: ERROR nova.compute.manager raise self.value [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 783.801184] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 783.801184] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.801184] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 783.801556] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 783.801556] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 783.801556] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. 
[ 783.801556] env[61648]: ERROR nova.compute.manager [ 783.801556] env[61648]: Traceback (most recent call last): [ 783.801556] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 783.801556] env[61648]: listener.cb(fileno) [ 783.801556] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 783.801556] env[61648]: result = function(*args, **kwargs) [ 783.801556] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 783.801556] env[61648]: return func(*args, **kwargs) [ 783.801556] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 783.801556] env[61648]: raise e [ 783.801556] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.801556] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 783.801556] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 783.801556] env[61648]: created_port_ids = self._update_ports_for_instance( [ 783.801556] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 783.801556] env[61648]: with excutils.save_and_reraise_exception(): [ 783.801556] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.801556] env[61648]: self.force_reraise() [ 783.801556] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.801556] env[61648]: raise self.value [ 783.801556] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 783.801556] env[61648]: updated_port = self._update_port( [ 783.801556] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.801556] env[61648]: _ensure_no_port_binding_failure(port) [ 783.801556] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 783.801556] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 783.802330] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. [ 783.802330] env[61648]: Removing descriptor: 19 [ 783.808907] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 783.847373] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 783.847733] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 783.849282] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.849552] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 783.849759] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.850340] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 783.850635] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 783.853233] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 783.853427] env[61648]: DEBUG nova.virt.hardware [None 
req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 783.853595] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 783.853768] env[61648]: DEBUG nova.virt.hardware [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.854679] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66151b6-9a88-414f-bd05-68c024e2166b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.865700] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c4748d-6e09-4c92-bbd8-d3f065cda0d1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.880328] env[61648]: ERROR nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. 
[ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Traceback (most recent call last): [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] yield resources [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.driver.spawn(context, instance, image_meta, [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self._vmops.spawn(context, instance, image_meta, injected_files, [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] vm_ref = self.build_virtual_machine(instance, [ 783.880328] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] vif_infos = vmwarevif.get_vif_info(self._session, [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] for vif in network_info: [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return self._sync_wrapper(fn, *args, **kwargs) [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.wait() [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self[:] = self._gt.wait() [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return self._exit_event.wait() [ 783.880704] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 783.880704] env[61648]: ERROR 
nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] current.throw(*self._exc) [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] result = function(*args, **kwargs) [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return func(*args, **kwargs) [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise e [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] nwinfo = self.network_api.allocate_for_instance( [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] created_port_ids = self._update_ports_for_instance( [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] with excutils.save_and_reraise_exception(): [ 783.881028] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.force_reraise() [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise self.value [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] updated_port = self._update_port( [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] _ensure_no_port_binding_failure(port) [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise exception.PortBindingFailed(port_id=port['id']) [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. [ 783.881342] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] [ 783.881342] env[61648]: INFO nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Terminating instance [ 783.883498] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquiring lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.883723] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquired lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.883935] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.972426] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Releasing lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.972860] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 783.973062] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.973364] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d34a9cd-aae6-4cf7-b141-486d896f542f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.983159] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd21ab5-539a-4516-87fd-bb4cb4bf9847 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.009971] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c could not be found. [ 784.010300] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.010552] env[61648]: INFO nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 784.010847] env[61648]: DEBUG oslo.service.loopingcall [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.011122] env[61648]: DEBUG nova.compute.manager [-] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 784.011274] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.038736] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.186692] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.187512] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.191156] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.373s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.195365] env[61648]: INFO nova.compute.claims [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.409827] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.502764] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.705064] env[61648]: DEBUG nova.compute.utils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.705064] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 784.705064] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.744991] env[61648]: DEBUG nova.policy [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75788746b2214f2e8c1a8884c89ddb9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd94e7e89f424d34920f0fa92acf3226', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 784.782367] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.987873] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Successfully created port: 89a6b845-6769-4c0a-bfb3-67030d4c6af3 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.002726] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Releasing lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.003164] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 785.003358] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.003778] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6eaabb9-be3e-4949-b5db-a2e82e993cc5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.013281] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54187f3c-f44e-4415-88d8-61aedf9c202f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.034338] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ffb6b3e0-5602-4c28-958d-22265337e236 could not be found. [ 785.034589] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.034941] env[61648]: INFO nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Took 0.03 seconds to destroy the instance on the hypervisor. [ 785.034999] env[61648]: DEBUG oslo.service.loopingcall [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.035241] env[61648]: DEBUG nova.compute.manager [-] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.035337] env[61648]: DEBUG nova.network.neutron [-] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.054168] env[61648]: DEBUG nova.network.neutron [-] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.212638] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.285694] env[61648]: INFO nova.compute.manager [-] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Took 1.27 seconds to deallocate network for instance. [ 785.291665] env[61648]: DEBUG nova.compute.claims [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 785.291855] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.560890] env[61648]: DEBUG nova.network.neutron [-] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.572586] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9891ed9-6d74-452e-a426-563a3ecb960d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.581595] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9537d522-0625-42e2-b644-40808bc06987 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.614931] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a65c2f9-358e-4339-bcde-e1c5772c7c4d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.622269] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f97204-e687-4c48-9877-6e882c125f84 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.628020] env[61648]: DEBUG nova.compute.manager [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Received event network-changed-d98545f7-8d23-473e-8881-42e0cb6de758 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 785.628302] env[61648]: DEBUG nova.compute.manager [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Refreshing instance network info cache due to event network-changed-d98545f7-8d23-473e-8881-42e0cb6de758. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 785.628529] env[61648]: DEBUG oslo_concurrency.lockutils [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] Acquiring lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.628669] env[61648]: DEBUG oslo_concurrency.lockutils [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] Acquired lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.628823] env[61648]: DEBUG nova.network.neutron [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Refreshing network info cache for port d98545f7-8d23-473e-8881-42e0cb6de758 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.641923] env[61648]: DEBUG nova.compute.provider_tree [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.058907] env[61648]: ERROR nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. 
[ 786.058907] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.058907] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.058907] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.058907] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.058907] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.058907] env[61648]: ERROR nova.compute.manager raise self.value [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.058907] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 786.058907] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.058907] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 786.059365] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.059365] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 786.059365] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. 
[ 786.059365] env[61648]: ERROR nova.compute.manager [ 786.059365] env[61648]: Traceback (most recent call last): [ 786.059365] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 786.059365] env[61648]: listener.cb(fileno) [ 786.059365] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.059365] env[61648]: result = function(*args, **kwargs) [ 786.059365] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.059365] env[61648]: return func(*args, **kwargs) [ 786.059365] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.059365] env[61648]: raise e [ 786.059365] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.059365] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 786.059365] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.059365] env[61648]: created_port_ids = self._update_ports_for_instance( [ 786.059365] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.059365] env[61648]: with excutils.save_and_reraise_exception(): [ 786.059365] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.059365] env[61648]: self.force_reraise() [ 786.059365] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.059365] env[61648]: raise self.value [ 786.059365] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.059365] env[61648]: updated_port = self._update_port( [ 786.059365] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.059365] env[61648]: _ensure_no_port_binding_failure(port) [ 786.059365] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.059365] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 786.060431] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. [ 786.060431] env[61648]: Removing descriptor: 19 [ 786.067130] env[61648]: INFO nova.compute.manager [-] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Took 1.03 seconds to deallocate network for instance. 
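
Both tracebacks above terminate in _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises PortBindingFailed after Neutron returns the port in a failed binding state. A minimal, self-contained sketch of that failure mode follows; the exception class and the 'binding:vif_type' / 'binding_failed' values are illustrative assumptions based on the Neutron port-binding extension, not code copied from Nova.

    # Illustrative sketch only: mirrors the check the traceback above points at.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding by leaving the port's vif_type
        # set to 'binding_failed'; surface that as an exception so the build aborts.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '89a6b845-6769-4c0a-bfb3-67030d4c6af3',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message format as the ERROR lines in this log
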
[ 786.069372] env[61648]: DEBUG nova.compute.claims [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 786.069372] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.149022] env[61648]: DEBUG nova.scheduler.client.report [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.158321] env[61648]: DEBUG nova.network.neutron [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.228984] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.234784] env[61648]: DEBUG nova.network.neutron [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.259095] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.259095] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.259095] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.259300] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.259300] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.259300] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.259300] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.259300] 
env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.259699] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.259981] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.260287] env[61648]: DEBUG nova.virt.hardware [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.261426] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce49a8f-5034-42ec-a358-e798af7becaa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.270599] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5629bd20-0cbf-4397-be24-b7ae0fcd5a48 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.285320] env[61648]: ERROR nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. 
[ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Traceback (most recent call last): [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] yield resources [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.driver.spawn(context, instance, image_meta, [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] vm_ref = self.build_virtual_machine(instance, [ 786.285320] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] for vif in network_info: [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return self._sync_wrapper(fn, *args, **kwargs) [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.wait() [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self[:] = self._gt.wait() [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return self._exit_event.wait() [ 786.285619] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 786.285619] env[61648]: ERROR 
nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] current.throw(*self._exc) [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] result = function(*args, **kwargs) [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return func(*args, **kwargs) [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise e [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] nwinfo = self.network_api.allocate_for_instance( [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] created_port_ids = self._update_ports_for_instance( [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] with excutils.save_and_reraise_exception(): [ 786.285903] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.force_reraise() [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise self.value [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] updated_port = self._update_port( [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] _ensure_no_port_binding_failure(port) [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise exception.PortBindingFailed(port_id=port['id']) [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. [ 786.286206] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] [ 786.288016] env[61648]: INFO nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Terminating instance [ 786.288856] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.289137] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.289407] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.654019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.654019] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 786.657412] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.532s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.659129] env[61648]: INFO nova.compute.claims [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.737556] env[61648]: DEBUG oslo_concurrency.lockutils [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] Releasing lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.737556] env[61648]: DEBUG nova.compute.manager [req-c3a332ff-dfb9-478a-8cd8-37da8c5456c4 req-0a947ccc-5b95-4a69-844a-fe6bc1e6e670 service nova] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Received event network-vif-deleted-d98545f7-8d23-473e-8881-42e0cb6de758 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.807656] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.892940] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.165024] env[61648]: DEBUG nova.compute.utils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.166869] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 787.167150] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 787.216573] env[61648]: DEBUG nova.policy [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd47b065fca5a49b5b03a2031ccdba340', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36971d41fd2a431a96eb0c59dc4a0a3a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 787.398017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.398017] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 787.398017] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 787.398017] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-225d1b3f-6b24-4742-9a83-448fca1ca242 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.404961] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91184fa-582b-4937-9214-a95a3415b309 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.429018] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6d9ab9ac-d892-47e7-9b86-a2dce40a4568 could not be found. 
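
As with instance ffb6b3e0-5602-4c28-958d-22265337e236 earlier in this section, the destroy path here tolerates a VM that never reached the backend: vmops logs the InstanceNotFound warning and continues, so the cleanup that follows below (network deallocation, claim abort) still runs. A hedged sketch of that tolerate-missing pattern; the names and signatures are illustrative, not Nova's actual methods.

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(instance_uuid, backend):
        """Destroy a VM, treating 'already gone' as success so cleanup proceeds."""
        try:
            backend.destroy(instance_uuid)
        except InstanceNotFound as exc:
            # Mirrors the WARNING in the log: the spawn failed before a VM was
            # created, so there is nothing to delete on the hypervisor.
            LOG.warning("Instance does not exist on backend: %s", exc)
        # Callers then deallocate networking and abort the resource claim
        # regardless of whether a backend VM ever existed.

    class _MissingBackend:
        def destroy(self, uuid):
            raise InstanceNotFound(f"Instance {uuid} could not be found.")

    logging.basicConfig(level=logging.WARNING)
    destroy_instance("6d9ab9ac-d892-47e7-9b86-a2dce40a4568", _MissingBackend())
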
[ 787.429018] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.429018] env[61648]: INFO nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Took 0.03 seconds to destroy the instance on the hypervisor. [ 787.429018] env[61648]: DEBUG oslo.service.loopingcall [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.429018] env[61648]: DEBUG nova.compute.manager [-] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.429018] env[61648]: DEBUG nova.network.neutron [-] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.444623] env[61648]: DEBUG nova.network.neutron [-] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.530362] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Successfully created port: a5f2ac34-743c-493f-88cd-b1a4f5166a12 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.661510] env[61648]: DEBUG nova.compute.manager [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Received event network-changed-89a6b845-6769-4c0a-bfb3-67030d4c6af3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.661510] env[61648]: DEBUG nova.compute.manager [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Refreshing instance network info cache due to event network-changed-89a6b845-6769-4c0a-bfb3-67030d4c6af3. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.663921] env[61648]: DEBUG oslo_concurrency.lockutils [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] Acquiring lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.663921] env[61648]: DEBUG oslo_concurrency.lockutils [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] Acquired lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.663921] env[61648]: DEBUG nova.network.neutron [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Refreshing network info cache for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.668337] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 787.945938] env[61648]: DEBUG nova.network.neutron [-] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.975204] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d836ff55-aa13-413d-a9a6-a3977d167c0c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.981867] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64028a0b-23e8-486e-9736-ff54c1c39f7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.015865] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1007e1fc-0d36-406d-b357-648ef3ac1d38 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.024022] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de49b3c-a2ec-49c6-8642-43e45f2901d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.039025] env[61648]: DEBUG nova.compute.provider_tree [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.181772] env[61648]: DEBUG nova.network.neutron [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.280407] env[61648]: DEBUG nova.network.neutron [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.450697] env[61648]: INFO nova.compute.manager [-] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Took 1.02 seconds to deallocate network for instance. [ 788.453303] env[61648]: DEBUG nova.compute.claims [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 788.453443] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.532510] env[61648]: ERROR nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. [ 788.532510] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.532510] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.532510] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.532510] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.532510] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.532510] env[61648]: ERROR nova.compute.manager raise self.value [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.532510] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 788.532510] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.532510] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 788.533156] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 788.533156] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 788.533156] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. [ 788.533156] env[61648]: ERROR nova.compute.manager [ 788.533156] env[61648]: Traceback (most recent call last): [ 788.533156] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 788.533156] env[61648]: listener.cb(fileno) [ 788.533156] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.533156] env[61648]: result = function(*args, **kwargs) [ 788.533156] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.533156] env[61648]: return func(*args, **kwargs) [ 788.533156] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.533156] env[61648]: raise e [ 788.533156] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.533156] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 788.533156] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.533156] env[61648]: created_port_ids = self._update_ports_for_instance( [ 788.533156] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.533156] env[61648]: with excutils.save_and_reraise_exception(): [ 788.533156] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.533156] env[61648]: self.force_reraise() [ 788.533156] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.533156] env[61648]: raise self.value [ 788.533156] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.533156] env[61648]: updated_port = self._update_port( [ 788.533156] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.533156] env[61648]: _ensure_no_port_binding_failure(port) [ 788.533156] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 788.533156] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 788.533891] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. 
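
The "Inventory has not changed for provider ..." entries in this section (at 786.149022 and just below at 788.543268) log the placement inventory for this compute node. Usable capacity per resource class follows from (total - reserved) * allocation_ratio, with max_unit (16 VCPU, 65530 MB, 156 GB here) additionally capping any single allocation. A short worked example using the logged figures; the helper below is illustrative, not the report client's code.

    # Usable capacity implied by the inventory dict in the scheduler report lines.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable(inv):
        # Capacity as placement computes it: (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
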
[ 788.533891] env[61648]: Removing descriptor: 19 [ 788.543268] env[61648]: DEBUG nova.scheduler.client.report [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.682518] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 788.711042] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 788.711315] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 788.711473] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 788.711651] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 788.711793] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 788.711936] env[61648]: DEBUG nova.virt.hardware [None 
req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 788.712177] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 788.712378] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 788.712558] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 788.712781] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 788.712894] env[61648]: DEBUG nova.virt.hardware [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 788.713788] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e02566c-c11a-4883-b587-ebab15d0efc7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.722733] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d5f568-de47-4d8f-85be-fb0b88287a75 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.735271] env[61648]: ERROR nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. 
[ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Traceback (most recent call last): [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] yield resources [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.driver.spawn(context, instance, image_meta, [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] vm_ref = self.build_virtual_machine(instance, [ 788.735271] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] for vif in network_info: [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return self._sync_wrapper(fn, *args, **kwargs) [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.wait() [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self[:] = self._gt.wait() [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return self._exit_event.wait() [ 788.735652] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 788.735652] env[61648]: ERROR 
nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] current.throw(*self._exc) [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] result = function(*args, **kwargs) [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return func(*args, **kwargs) [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise e [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] nwinfo = self.network_api.allocate_for_instance( [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] created_port_ids = self._update_ports_for_instance( [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] with excutils.save_and_reraise_exception(): [ 788.736040] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.force_reraise() [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise self.value [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] updated_port = self._update_port( [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] _ensure_no_port_binding_failure(port) [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise exception.PortBindingFailed(port_id=port['id']) [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. [ 788.736374] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] [ 788.736374] env[61648]: INFO nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Terminating instance [ 788.737788] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquiring lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.737949] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquired lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.738128] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.782585] env[61648]: DEBUG oslo_concurrency.lockutils [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] Releasing lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.782862] env[61648]: DEBUG nova.compute.manager [req-31cbca34-ec77-4301-8f2c-dd5c26b60246 req-c2e6f0ea-1979-4164-9920-bef5fec04f6c service nova] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Received event network-vif-deleted-89a6b845-6769-4c0a-bfb3-67030d4c6af3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.048739] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.049330] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 789.052206] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.243s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.054006] env[61648]: INFO nova.compute.claims [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.257616] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.342082] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.561616] env[61648]: DEBUG nova.compute.utils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 789.563659] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 789.563857] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 789.613685] env[61648]: DEBUG nova.policy [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '876b46a0e3c542eb9e267f0e0615123c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ccacb6024de431092dd0610c5ca38cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 789.688443] env[61648]: DEBUG nova.compute.manager [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Received event network-changed-a5f2ac34-743c-493f-88cd-b1a4f5166a12 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.688682] env[61648]: DEBUG nova.compute.manager [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Refreshing instance network info cache due to event network-changed-a5f2ac34-743c-493f-88cd-b1a4f5166a12. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 789.688868] env[61648]: DEBUG oslo_concurrency.lockutils [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] Acquiring lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.845090] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Releasing lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.845533] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 789.845730] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.846050] env[61648]: DEBUG oslo_concurrency.lockutils [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] Acquired lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.846244] env[61648]: DEBUG nova.network.neutron [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Refreshing network info cache for port a5f2ac34-743c-493f-88cd-b1a4f5166a12 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.847345] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed8f9ff2-a4b5-4b53-aeb8-ee6f672440f4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.858343] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e678d414-4a2c-49ab-9525-a02f72c3e113 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.880966] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26534fec-8cf8-4cdd-a91f-e63afabc0d57 could not be found. [ 789.881215] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.881395] env[61648]: INFO nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Took 0.04 seconds to destroy the instance on the hypervisor. [ 789.881638] env[61648]: DEBUG oslo.service.loopingcall [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.881855] env[61648]: DEBUG nova.compute.manager [-] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.881951] env[61648]: DEBUG nova.network.neutron [-] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.896729] env[61648]: DEBUG nova.network.neutron [-] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.973947] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Successfully created port: 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.064614] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 790.375900] env[61648]: DEBUG nova.network.neutron [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.400227] env[61648]: DEBUG nova.network.neutron [-] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.416030] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03416f4f-9ce0-40a7-b013-926b36c607f0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.422522] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740f73d0-5b8d-44f0-9a86-6e8bb035ebbb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.457423] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97fa226-385b-4715-b7b3-f9dab2140fdf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.466128] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6a4fb8-8849-46c9-9788-867bb73098c3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.481109] env[61648]: DEBUG nova.compute.provider_tree [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.519390] env[61648]: DEBUG nova.network.neutron [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.905100] env[61648]: INFO nova.compute.manager [-] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Took 1.02 seconds to deallocate network for instance. 
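[editor's note] The tracebacks above all terminate in nova.network.neutron._ensure_no_port_binding_failure raising PortBindingFailed from inside _update_ports_for_instance, which wraps the failure in oslo_utils.excutils.save_and_reraise_exception (hence the force_reraise()/raise self.value frames). The following is a minimal, self-contained sketch of those two patterns only, modeled on what the log shows: the PortBindingFailed class here is a local stand-in, and the 'binding_failed' vif-type check is an assumption about what the real helper inspects, not code copied from Nova.

# Sketch only: mirrors the control flow visible in the tracebacks above.
# PortBindingFailed and the 'binding_failed' check are stand-ins modeled on
# the log messages; excutils.save_and_reraise_exception is the real oslo.utils API.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Local stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron signals a failed binding via the port's
    # binding:vif_type attribute.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def _update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        try:
            _ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
        except Exception:
            # save_and_reraise_exception re-raises the original exception on
            # __exit__, which is why the traceback shows force_reraise() and
            # raise self.value after the cleanup body runs.
            with excutils.save_and_reraise_exception():
                pass  # cleanup of already-created ports would go here
    return created_port_ids


if __name__ == '__main__':
    try:
        _update_ports_for_instance(
            [{'id': 'a5f2ac34-743c-493f-88cd-b1a4f5166a12',
              'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)

The "Acquiring lock" / "Lock ... acquired by ... waited Ns" / "released ... held Ns" lines around the claim abort and refresh_cache operations come from oslo_concurrency.lockutils, which logs those timings whenever a named lock such as "compute_resources" is taken and released.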
[ 790.908975] env[61648]: DEBUG nova.compute.claims [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 790.909238] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.984037] env[61648]: DEBUG nova.scheduler.client.report [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.022039] env[61648]: DEBUG oslo_concurrency.lockutils [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] Releasing lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.022177] env[61648]: DEBUG nova.compute.manager [req-d8a485da-b181-4623-aa90-616ded706dd7 req-0faaf234-cd31-4cb7-88bc-b882fa60f6ba service nova] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Received event network-vif-deleted-a5f2ac34-743c-493f-88cd-b1a4f5166a12 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.081035] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 791.097296] env[61648]: ERROR nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. 
[ 791.097296] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.097296] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.097296] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.097296] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.097296] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.097296] env[61648]: ERROR nova.compute.manager raise self.value [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.097296] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 791.097296] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.097296] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 791.097737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.097737] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 791.097737] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. 
[ 791.097737] env[61648]: ERROR nova.compute.manager [ 791.097737] env[61648]: Traceback (most recent call last): [ 791.097737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 791.097737] env[61648]: listener.cb(fileno) [ 791.097737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.097737] env[61648]: result = function(*args, **kwargs) [ 791.097737] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 791.097737] env[61648]: return func(*args, **kwargs) [ 791.097737] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.097737] env[61648]: raise e [ 791.097737] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.097737] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 791.097737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.097737] env[61648]: created_port_ids = self._update_ports_for_instance( [ 791.097737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.097737] env[61648]: with excutils.save_and_reraise_exception(): [ 791.097737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.097737] env[61648]: self.force_reraise() [ 791.097737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.097737] env[61648]: raise self.value [ 791.097737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.097737] env[61648]: updated_port = self._update_port( [ 791.097737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.097737] env[61648]: _ensure_no_port_binding_failure(port) [ 791.097737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.097737] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 791.098536] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. 
[ 791.098536] env[61648]: Removing descriptor: 19 [ 791.116151] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.116768] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.117092] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.117759] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.118873] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.119062] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.120314] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.121357] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.121357] env[61648]: DEBUG nova.virt.hardware [None 
req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.121357] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.121357] env[61648]: DEBUG nova.virt.hardware [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.122061] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dd10de-f443-4a89-a948-3aec9928c4de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.132978] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75afcfb-225a-4551-bbcc-620f45a6b6da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.147982] env[61648]: ERROR nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. 
[ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Traceback (most recent call last): [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] yield resources [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.driver.spawn(context, instance, image_meta, [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] vm_ref = self.build_virtual_machine(instance, [ 791.147982] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] for vif in network_info: [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return self._sync_wrapper(fn, *args, **kwargs) [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.wait() [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self[:] = self._gt.wait() [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return self._exit_event.wait() [ 791.149542] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 791.149542] env[61648]: ERROR 
nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] current.throw(*self._exc) [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] result = function(*args, **kwargs) [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return func(*args, **kwargs) [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise e [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] nwinfo = self.network_api.allocate_for_instance( [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] created_port_ids = self._update_ports_for_instance( [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] with excutils.save_and_reraise_exception(): [ 791.149866] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.force_reraise() [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise self.value [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] updated_port = self._update_port( [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] _ensure_no_port_binding_failure(port) [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise exception.PortBindingFailed(port_id=port['id']) [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. [ 791.150183] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] [ 791.150183] env[61648]: INFO nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Terminating instance [ 791.152026] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.152026] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.152026] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.488826] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.489401] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 791.492528] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.557s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.670101] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.726154] env[61648]: DEBUG nova.compute.manager [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Received event network-changed-0d0c85c4-7e97-4a61-9c8b-254ffd11fba9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.726368] env[61648]: DEBUG nova.compute.manager [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Refreshing instance network info cache due to event network-changed-0d0c85c4-7e97-4a61-9c8b-254ffd11fba9. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 791.726433] env[61648]: DEBUG oslo_concurrency.lockutils [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] Acquiring lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.793166] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.004349] env[61648]: DEBUG nova.compute.utils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 792.006511] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 792.006798] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 792.060093] env[61648]: DEBUG nova.policy [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbda70b6d0b44b22b73fe403763c4fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7997df941d37470cbf8055742a44064d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 792.299022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.299022] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 792.299022] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 792.299022] env[61648]: DEBUG oslo_concurrency.lockutils [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] Acquired lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.299022] env[61648]: DEBUG nova.network.neutron [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Refreshing network info cache for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.299268] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f04f3af-e8b7-469b-8f35-8c16f29ac153 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.309513] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2190fb-306e-4fe2-90df-87fe9961d58c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.339111] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4b580e9-aae2-4c14-abd8-c6a08f0a576c could not be found. [ 792.339111] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.339111] env[61648]: INFO nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 792.339111] env[61648]: DEBUG oslo.service.loopingcall [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.340435] env[61648]: DEBUG nova.compute.manager [-] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 792.340713] env[61648]: DEBUG nova.network.neutron [-] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.360929] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6269ece-a586-457d-8a9d-84b442cc118d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.368576] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22474f0-52e5-42b7-bd40-38234be29014 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.372875] env[61648]: DEBUG nova.network.neutron [-] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.410662] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Successfully created port: 90b8b3c4-9afe-4d0c-a854-9a4b52282293 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.413106] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2081cd3a-1b61-4e37-ace2-1312b7229256 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.421867] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cfa5d9-a1b0-44a2-b5e0-d06eab881be6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.437564] env[61648]: DEBUG nova.compute.provider_tree [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.509887] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 792.820057] env[61648]: DEBUG nova.network.neutron [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.876690] env[61648]: DEBUG nova.network.neutron [-] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.942487] env[61648]: DEBUG nova.scheduler.client.report [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.957590] env[61648]: DEBUG nova.network.neutron [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.381732] env[61648]: INFO nova.compute.manager [-] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Took 1.04 seconds to deallocate network for instance. [ 793.384273] env[61648]: DEBUG nova.compute.claims [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 793.384561] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.452071] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.452755] env[61648]: ERROR nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. 
[ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Traceback (most recent call last): [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.driver.spawn(context, instance, image_meta, [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] vm_ref = self.build_virtual_machine(instance, [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.452755] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] for vif in network_info: [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self._sync_wrapper(fn, *args, **kwargs) [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.wait() [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self[:] = self._gt.wait() [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self._exit_event.wait() [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] result = hub.switch() [ 793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
793.454417] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return self.greenlet.switch() [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] result = function(*args, **kwargs) [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] return func(*args, **kwargs) [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise e [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] nwinfo = self.network_api.allocate_for_instance( [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] created_port_ids = self._update_ports_for_instance( [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] with excutils.save_and_reraise_exception(): [ 793.454810] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] self.force_reraise() [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise self.value [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] updated_port = self._update_port( [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] _ensure_no_port_binding_failure(port) [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] raise exception.PortBindingFailed(port_id=port['id']) [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] nova.exception.PortBindingFailed: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. [ 793.455182] env[61648]: ERROR nova.compute.manager [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] [ 793.455493] env[61648]: DEBUG nova.compute.utils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 793.455493] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.176s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.455493] env[61648]: DEBUG nova.objects.instance [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61648) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 793.458063] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Build of instance d76d8aed-9126-4d21-9df9-6317c3b19f65 was re-scheduled: Binding failed for port 1756d1ff-7e88-4633-b466-c45f9bcb0279, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 793.458566] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 793.458792] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquiring lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.458936] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Acquired lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.459107] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.460273] env[61648]: DEBUG oslo_concurrency.lockutils [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] Releasing lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.460493] env[61648]: DEBUG nova.compute.manager [req-3b10655f-021f-4c7a-817f-8614e5d2b725 req-2fb97ed6-d178-4930-aaca-9f312917d7ac service nova] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Received event network-vif-deleted-0d0c85c4-7e97-4a61-9c8b-254ffd11fba9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.519668] env[61648]: ERROR nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. 
[ 793.519668] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.519668] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.519668] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.519668] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.519668] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.519668] env[61648]: ERROR nova.compute.manager raise self.value [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.519668] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 793.519668] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.519668] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 793.520246] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.520246] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 793.520246] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. 
[ 793.520246] env[61648]: ERROR nova.compute.manager [ 793.520246] env[61648]: Traceback (most recent call last): [ 793.520246] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 793.520246] env[61648]: listener.cb(fileno) [ 793.520246] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.520246] env[61648]: result = function(*args, **kwargs) [ 793.520246] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.520246] env[61648]: return func(*args, **kwargs) [ 793.520246] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.520246] env[61648]: raise e [ 793.520246] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.520246] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 793.520246] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.520246] env[61648]: created_port_ids = self._update_ports_for_instance( [ 793.520246] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.520246] env[61648]: with excutils.save_and_reraise_exception(): [ 793.520246] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.520246] env[61648]: self.force_reraise() [ 793.520246] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.520246] env[61648]: raise self.value [ 793.520246] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.520246] env[61648]: updated_port = self._update_port( [ 793.520246] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.520246] env[61648]: _ensure_no_port_binding_failure(port) [ 793.520246] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.520246] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 793.521211] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. [ 793.521211] env[61648]: Removing descriptor: 19 [ 793.526522] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 793.551761] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 793.552056] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 793.552233] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.552416] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 793.552562] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.552706] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 793.552913] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 793.553082] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 793.553250] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 793.553410] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 793.553581] env[61648]: DEBUG nova.virt.hardware [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 793.554461] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c25e522-d268-44a2-93f4-5507454aafbb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.562680] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6d68cb-8f05-4c89-9466-990cbf1a5a81 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.576145] env[61648]: ERROR nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. 
[ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Traceback (most recent call last): [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] yield resources [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.driver.spawn(context, instance, image_meta, [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] vm_ref = self.build_virtual_machine(instance, [ 793.576145] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] for vif in network_info: [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return self._sync_wrapper(fn, *args, **kwargs) [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.wait() [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self[:] = self._gt.wait() [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return self._exit_event.wait() [ 793.576681] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 793.576681] env[61648]: ERROR 
nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] current.throw(*self._exc) [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] result = function(*args, **kwargs) [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return func(*args, **kwargs) [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise e [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] nwinfo = self.network_api.allocate_for_instance( [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] created_port_ids = self._update_ports_for_instance( [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] with excutils.save_and_reraise_exception(): [ 793.577050] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.force_reraise() [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise self.value [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] updated_port = self._update_port( [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] _ensure_no_port_binding_failure(port) [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise exception.PortBindingFailed(port_id=port['id']) [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. [ 793.577413] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] [ 793.577413] env[61648]: INFO nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Terminating instance [ 793.578486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquiring lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.578656] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquired lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.578820] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.753209] env[61648]: DEBUG nova.compute.manager [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Received event network-changed-90b8b3c4-9afe-4d0c-a854-9a4b52282293 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.753420] env[61648]: DEBUG nova.compute.manager [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Refreshing instance network info cache due to event network-changed-90b8b3c4-9afe-4d0c-a854-9a4b52282293. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 793.753610] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] Acquiring lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.983820] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.068161] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.097324] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.202238] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.474104] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4b9fac25-aa36-46bd-9346-d807ee4020ee tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.475658] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.562s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.571037] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Releasing lock "refresh_cache-d76d8aed-9126-4d21-9df9-6317c3b19f65" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.571319] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 794.571502] env[61648]: DEBUG nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.571673] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.586965] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.705385] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Releasing lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.705832] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 794.706021] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.706330] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] Acquired lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.706513] env[61648]: DEBUG nova.network.neutron [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Refreshing network info cache for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.707565] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4edbf3a2-7d1b-46cd-b162-413334891e98 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.716928] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ae8678-381a-43f5-8f63-24ffc6a0915a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.739595] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 423b5f66-624b-49fe-9f65-9bd3318917c4 could not be found. [ 794.739816] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.743283] env[61648]: INFO nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 794.743536] env[61648]: DEBUG oslo.service.loopingcall [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.743756] env[61648]: DEBUG nova.compute.manager [-] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.743849] env[61648]: DEBUG nova.network.neutron [-] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.758989] env[61648]: DEBUG nova.network.neutron [-] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.089693] env[61648]: DEBUG nova.network.neutron [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.240136] env[61648]: DEBUG nova.network.neutron [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.268019] env[61648]: DEBUG nova.network.neutron [-] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.328919] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf615d9-82cb-4d64-8ddd-6e11ade3a786 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.332682] env[61648]: DEBUG nova.network.neutron [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.339343] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2668e719-b3f1-4970-8591-c2e007f5f770 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.373583] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260814f5-57bb-4598-9103-8232989aa1f6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.381495] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09072228-7a97-47fc-ad9f-af34c12772cb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.396671] env[61648]: DEBUG nova.compute.provider_tree [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.595610] env[61648]: INFO nova.compute.manager [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] [instance: d76d8aed-9126-4d21-9df9-6317c3b19f65] Took 1.02 seconds to deallocate network for instance. [ 795.770061] env[61648]: INFO nova.compute.manager [-] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Took 1.03 seconds to deallocate network for instance. [ 795.772229] env[61648]: DEBUG nova.compute.claims [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 795.772567] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.835733] env[61648]: DEBUG oslo_concurrency.lockutils [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] Releasing lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.836014] env[61648]: DEBUG nova.compute.manager [req-9c98df7a-7f40-4e85-bb70-b6f5847d1cac req-384ee089-cc40-4002-92b3-ac7380c8976d service nova] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Received event network-vif-deleted-90b8b3c4-9afe-4d0c-a854-9a4b52282293 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 795.901924] env[61648]: DEBUG nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.406953] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.407608] env[61648]: ERROR nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Failed to build and run instance: 
nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Traceback (most recent call last): [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.driver.spawn(context, instance, image_meta, [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] vm_ref = self.build_virtual_machine(instance, [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] vif_infos = vmwarevif.get_vif_info(self._session, [ 796.407608] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] for vif in network_info: [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return self._sync_wrapper(fn, *args, **kwargs) [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.wait() [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self[:] = self._gt.wait() [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return self._exit_event.wait() [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] current.throw(*self._exc) [ 796.407929] env[61648]: ERROR nova.compute.manager 
[instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 796.407929] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] result = function(*args, **kwargs) [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] return func(*args, **kwargs) [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise e [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] nwinfo = self.network_api.allocate_for_instance( [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] created_port_ids = self._update_ports_for_instance( [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] with excutils.save_and_reraise_exception(): [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] self.force_reraise() [ 796.408284] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise self.value [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] updated_port = self._update_port( [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] _ensure_no_port_binding_failure(port) [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] raise exception.PortBindingFailed(port_id=port['id']) [ 
796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] nova.exception.PortBindingFailed: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. [ 796.408626] env[61648]: ERROR nova.compute.manager [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] [ 796.408626] env[61648]: DEBUG nova.compute.utils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 796.409654] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.529s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.409827] env[61648]: DEBUG nova.objects.instance [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lazy-loading 'resources' on Instance uuid b9130bac-f92b-4208-b84c-852f4a269153 {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.411104] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Build of instance bb9f6dc4-fd06-4cb5-984f-c938ed901772 was re-scheduled: Binding failed for port 2b3a0b89-5f13-4fa2-8319-e164abbb9e27, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 796.411500] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 796.411722] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquiring lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.411866] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Acquired lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.412031] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.627009] env[61648]: INFO nova.scheduler.client.report [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Deleted allocations for instance d76d8aed-9126-4d21-9df9-6317c3b19f65 [ 796.939627] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.011860] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.134890] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e92b3d35-6497-47c2-afce-aae2f0702512 tempest-VolumesAdminNegativeTest-569099347 tempest-VolumesAdminNegativeTest-569099347-project-member] Lock "d76d8aed-9126-4d21-9df9-6317c3b19f65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 173.563s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.207053] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a51153-fd90-4b32-a176-f0c572a2e4e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.215236] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56259b22-6361-4431-bce7-093f9544a9ea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.244961] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d057ff-1bf5-4ee8-8400-2798c8b31aac {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.252122] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39ddd96-1907-4717-84e2-e9cbca60416e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.265325] env[61648]: DEBUG nova.compute.provider_tree [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.514701] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Releasing lock "refresh_cache-bb9f6dc4-fd06-4cb5-984f-c938ed901772" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.514959] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 797.515193] env[61648]: DEBUG nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.515424] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.572520] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.639109] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 797.768940] env[61648]: DEBUG nova.scheduler.client.report [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.075360] env[61648]: DEBUG nova.network.neutron [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.166271] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.276468] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.278908] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.421s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.280551] env[61648]: INFO nova.compute.claims [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.297992] env[61648]: INFO nova.scheduler.client.report [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Deleted allocations for instance b9130bac-f92b-4208-b84c-852f4a269153 [ 798.580311] env[61648]: INFO nova.compute.manager [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] [instance: bb9f6dc4-fd06-4cb5-984f-c938ed901772] Took 1.06 seconds to deallocate network for instance. [ 798.805236] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c4d5b51b-05df-4ac3-ae9b-4496c2526a44 tempest-ServerShowV254Test-1391479595 tempest-ServerShowV254Test-1391479595-project-member] Lock "b9130bac-f92b-4208-b84c-852f4a269153" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.680s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.614238] env[61648]: INFO nova.scheduler.client.report [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Deleted allocations for instance bb9f6dc4-fd06-4cb5-984f-c938ed901772 [ 799.648349] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a158e6d4-42e8-442e-9c86-2c00893ff90f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.658193] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76100908-1bf7-40ad-8018-b41db3f7b55d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.693183] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f68f472-15e7-4639-940c-54da6db70adf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.701104] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4850e91b-d781-455d-adb3-36b6969fde7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.716478] env[61648]: DEBUG nova.compute.provider_tree [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.128882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b18e4ad-ee5a-4f9b-85f0-ad621079a2b4 tempest-ServersNegativeTestMultiTenantJSON-1000829774 tempest-ServersNegativeTestMultiTenantJSON-1000829774-project-member] Lock "bb9f6dc4-fd06-4cb5-984f-c938ed901772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.595s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.218968] env[61648]: DEBUG nova.scheduler.client.report [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.633089] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 800.723809] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.724381] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 800.727811] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.046s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.729300] env[61648]: INFO nova.compute.claims [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.165437] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.236027] env[61648]: DEBUG nova.compute.utils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.237539] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 801.237834] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 801.310524] env[61648]: DEBUG nova.policy [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff74b87a93164526915e71a60d026f94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a82e8fa1e743709a82997bd988b699', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 801.698849] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Successfully created port: b5c57279-8a1d-48d6-91a1-5da524713010 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.740065] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 802.066085] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf494d7-42a1-4148-afce-1c12469fb112 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.076363] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e32c0f-edb7-46f9-b57b-c4d61bf89f27 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.117874] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa155f3-422a-43fb-aa3b-43ef3d10dd65 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.125947] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd6eda4-0b19-41c4-8e7f-8fa85e6a7ede {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.140762] env[61648]: DEBUG nova.compute.provider_tree [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.643787] env[61648]: DEBUG nova.scheduler.client.report [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.760562] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 802.794349] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 802.794620] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 802.794760] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.794974] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 802.795306] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.795306] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 802.795424] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 802.795572] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 802.795703] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 
tempest-ServersTestJSON-1335775600-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 802.795866] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 802.800194] env[61648]: DEBUG nova.virt.hardware [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 802.801131] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8428431f-2ec2-454e-82e3-e2f3e7b0ef0c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.813021] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c364d5e5-805a-4b3d-9a39-c324ce33f6d0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.053454] env[61648]: DEBUG nova.compute.manager [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Received event network-changed-b5c57279-8a1d-48d6-91a1-5da524713010 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 803.053454] env[61648]: DEBUG nova.compute.manager [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Refreshing instance network info cache due to event network-changed-b5c57279-8a1d-48d6-91a1-5da524713010. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 803.053454] env[61648]: DEBUG oslo_concurrency.lockutils [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] Acquiring lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.053454] env[61648]: DEBUG oslo_concurrency.lockutils [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] Acquired lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.053454] env[61648]: DEBUG nova.network.neutron [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Refreshing network info cache for port b5c57279-8a1d-48d6-91a1-5da524713010 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.123288] env[61648]: ERROR nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. 
[ 803.123288] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.123288] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 803.123288] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 803.123288] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.123288] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.123288] env[61648]: ERROR nova.compute.manager raise self.value [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 803.123288] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 803.123288] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.123288] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 803.123734] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.123734] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 803.123734] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. 
[ 803.123734] env[61648]: ERROR nova.compute.manager [ 803.123734] env[61648]: Traceback (most recent call last): [ 803.123734] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 803.123734] env[61648]: listener.cb(fileno) [ 803.123734] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.123734] env[61648]: result = function(*args, **kwargs) [ 803.123734] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 803.123734] env[61648]: return func(*args, **kwargs) [ 803.123734] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.123734] env[61648]: raise e [ 803.123734] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.123734] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 803.123734] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 803.123734] env[61648]: created_port_ids = self._update_ports_for_instance( [ 803.123734] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 803.123734] env[61648]: with excutils.save_and_reraise_exception(): [ 803.123734] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.123734] env[61648]: self.force_reraise() [ 803.123734] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.123734] env[61648]: raise self.value [ 803.123734] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 803.123734] env[61648]: updated_port = self._update_port( [ 803.123734] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.123734] env[61648]: _ensure_no_port_binding_failure(port) [ 803.123734] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.123734] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 803.124384] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. [ 803.124384] env[61648]: Removing descriptor: 19 [ 803.124384] env[61648]: ERROR nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. 
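The two tracebacks above bottom out in nova/network/neutron.py, where _ensure_no_port_binding_failure raises PortBindingFailed for the newly created port b5c57279-8a1d-48d6-91a1-5da524713010. A minimal sketch of that kind of guard, for orientation only: the 'binding:vif_type' == 'binding_failed' test is an assumption about how Neutron marks a failed binding, not a quote of Nova's implementation.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # Assumption: Neutron flags a failed binding by setting the port's
    # 'binding:vif_type' to 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# A port dict shaped like the one the log complains about.
port = {"id": "b5c57279-8a1d-48d6-91a1-5da524713010",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message shape as the ERROR lines above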
[ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Traceback (most recent call last): [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] yield resources [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.driver.spawn(context, instance, image_meta, [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.124384] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] vm_ref = self.build_virtual_machine(instance, [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] for vif in network_info: [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self._sync_wrapper(fn, *args, **kwargs) [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.wait() [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self[:] = self._gt.wait() [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self._exit_event.wait() [ 803.124664] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.125389] env[61648]: ERROR 
nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] result = hub.switch() [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self.greenlet.switch() [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] result = function(*args, **kwargs) [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return func(*args, **kwargs) [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise e [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] nwinfo = self.network_api.allocate_for_instance( [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 803.125389] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] created_port_ids = self._update_ports_for_instance( [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] with excutils.save_and_reraise_exception(): [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.force_reraise() [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise self.value [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] updated_port = self._update_port( [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.125675] 
env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] _ensure_no_port_binding_failure(port) [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.125675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise exception.PortBindingFailed(port_id=port['id']) [ 803.126015] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. [ 803.126015] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] [ 803.126015] env[61648]: INFO nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Terminating instance [ 803.126015] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquiring lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.148950] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.149440] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 803.152559] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.861s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.578741] env[61648]: DEBUG nova.network.neutron [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.665115] env[61648]: DEBUG nova.compute.utils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.670522] env[61648]: DEBUG nova.network.neutron [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.673717] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 803.673717] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.726201] env[61648]: DEBUG nova.policy [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.977897] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182608d4-e470-4743-b2a7-fd5d0edce518 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.988155] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e6fda2-5e2d-441c-acd4-c9761989b4bd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.026213] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a093d0-87cd-425c-9b22-d04069f811a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.034281] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a00a08-d86e-421e-b286-f81e0f383ca7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.047871] env[61648]: DEBUG nova.compute.provider_tree [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.086236] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Successfully created port: bb40f15f-6e56-469d-b57a-dbc51e99fd26 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.175262] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 804.180675] env[61648]: DEBUG oslo_concurrency.lockutils [req-36a0e1b2-13fa-456e-80d3-d48032b7c493 req-37a3d59e-189d-40b5-8289-f713fa8d5c38 service nova] Releasing lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.181170] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquired lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.183467] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.551524] env[61648]: DEBUG nova.scheduler.client.report [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.738125] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.887777] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.061113] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.061113] env[61648]: ERROR nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Traceback (most recent call last): [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.driver.spawn(context, instance, image_meta, [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 805.061113] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] vm_ref = self.build_virtual_machine(instance, [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] vif_infos = vmwarevif.get_vif_info(self._session, [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] for vif in network_info: [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self._sync_wrapper(fn, *args, **kwargs) [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.wait() [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self[:] = self._gt.wait() [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self._exit_event.wait() [ 805.061363] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] result = hub.switch() [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return self.greenlet.switch() [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] result = function(*args, **kwargs) [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] return func(*args, **kwargs) [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise e [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] nwinfo = self.network_api.allocate_for_instance( [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 805.061659] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] created_port_ids = self._update_ports_for_instance( [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] with excutils.save_and_reraise_exception(): [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] self.force_reraise() [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise self.value [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] updated_port = self._update_port( [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] _ensure_no_port_binding_failure(port) [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 805.062026] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] raise exception.PortBindingFailed(port_id=port['id']) [ 805.062293] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] nova.exception.PortBindingFailed: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. [ 805.062293] env[61648]: ERROR nova.compute.manager [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] [ 805.062293] env[61648]: DEBUG nova.compute.utils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 805.067021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.995s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.067462] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Build of instance 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c was re-scheduled: Binding failed for port dc2bb52d-bd0e-4697-8740-bc2adfa0c416, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 805.068082] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 805.068967] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquiring lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.071035] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Acquired lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.071035] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.089431] env[61648]: DEBUG nova.compute.manager [req-5cf6ea96-0483-46cd-b467-6c7f2c63b0ee req-6137fba6-bf28-4a9b-9944-31b40b124d67 service nova] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Received event network-vif-deleted-b5c57279-8a1d-48d6-91a1-5da524713010 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.194660] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 805.233500] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.233777] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.233930] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.234198] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.234304] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.234405] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.234611] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.234766] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.234930] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 
tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.235130] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.235304] env[61648]: DEBUG nova.virt.hardware [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.236172] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834cb6d3-bb37-4285-bf9d-894871f9b38d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.247453] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3086a538-6fb8-470b-9ed8-c964d64e2a43 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.390024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Releasing lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.390468] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 805.390651] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 805.390952] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8982e36e-eb60-4169-b433-6c1383350b7b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.401345] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4fed26-8ba6-495d-a131-80a679c3da11 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.424170] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ca295b7-50e2-4b6b-8033-991328a43f3e could not be found. 
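The recurring 'Acquiring lock' / 'acquired ... waited' / '"released" ... held' lines (for "compute_resources", the per-instance UUID locks, and the "refresh_cache-..." locks) come from oslo.concurrency's lockutils instrumentation around critical sections. A minimal sketch of the two usage forms visible in this log, assuming oslo.concurrency is installed and plain in-process locks; Nova layers its own helpers on top, so this shows the pattern, not Nova's exact code.

from oslo_concurrency import lockutils

# Context-manager form: produces the bare "Acquiring lock ... / Acquired
# lock ... / Releasing lock ..." DEBUG lines seen for the
# "refresh_cache-<uuid>" locks.
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance network info cache here

# Wrapped/decorator form: produces the "... by '<function>' :: waited/held"
# timing lines seen for the "compute_resources" lock.
@lockutils.synchronized("compute_resources")
def instance_claim():
    pass  # claim CPU/RAM/disk against the resource tracker here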
[ 805.424170] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.424170] env[61648]: INFO nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 805.424170] env[61648]: DEBUG oslo.service.loopingcall [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.425306] env[61648]: DEBUG nova.compute.manager [-] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 805.425306] env[61648]: DEBUG nova.network.neutron [-] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 805.427963] env[61648]: ERROR nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. 
[ 805.427963] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 805.427963] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 805.427963] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 805.427963] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.427963] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.427963] env[61648]: ERROR nova.compute.manager raise self.value [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 805.427963] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 805.427963] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.427963] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 805.428466] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 805.428466] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 805.428466] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. 
[ 805.428466] env[61648]: ERROR nova.compute.manager [ 805.428466] env[61648]: Traceback (most recent call last): [ 805.428466] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 805.428466] env[61648]: listener.cb(fileno) [ 805.428466] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 805.428466] env[61648]: result = function(*args, **kwargs) [ 805.428466] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 805.428466] env[61648]: return func(*args, **kwargs) [ 805.428466] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 805.428466] env[61648]: raise e [ 805.428466] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 805.428466] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 805.428466] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 805.428466] env[61648]: created_port_ids = self._update_ports_for_instance( [ 805.428466] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 805.428466] env[61648]: with excutils.save_and_reraise_exception(): [ 805.428466] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.428466] env[61648]: self.force_reraise() [ 805.428466] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.428466] env[61648]: raise self.value [ 805.428466] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 805.428466] env[61648]: updated_port = self._update_port( [ 805.428466] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.428466] env[61648]: _ensure_no_port_binding_failure(port) [ 805.428466] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 805.428466] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 805.429280] env[61648]: nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. [ 805.429280] env[61648]: Removing descriptor: 19 [ 805.429280] env[61648]: ERROR nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. 
[ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Traceback (most recent call last): [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] yield resources [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.driver.spawn(context, instance, image_meta, [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 805.429280] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] vm_ref = self.build_virtual_machine(instance, [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] vif_infos = vmwarevif.get_vif_info(self._session, [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] for vif in network_info: [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self._sync_wrapper(fn, *args, **kwargs) [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.wait() [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self[:] = self._gt.wait() [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self._exit_event.wait() [ 805.429772] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 805.430239] env[61648]: ERROR 
nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] result = hub.switch() [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self.greenlet.switch() [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] result = function(*args, **kwargs) [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return func(*args, **kwargs) [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise e [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] nwinfo = self.network_api.allocate_for_instance( [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 805.430239] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] created_port_ids = self._update_ports_for_instance( [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] with excutils.save_and_reraise_exception(): [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.force_reraise() [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise self.value [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] updated_port = self._update_port( [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.431160] 
env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] _ensure_no_port_binding_failure(port) [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 805.431160] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise exception.PortBindingFailed(port_id=port['id']) [ 805.431452] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. [ 805.431452] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] [ 805.431452] env[61648]: INFO nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Terminating instance [ 805.431452] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.431581] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.431689] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.455032] env[61648]: DEBUG nova.network.neutron [-] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.594621] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.751334] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.907171] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9af362-5e61-47bf-8a9d-4eb4ade3eab3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.916149] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e1c710-68f7-4ee7-aa81-ef137c6b8f95 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.951337] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd0b9b7-b3d7-4dcf-aae3-5e947b3ce262 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.955558] env[61648]: DEBUG nova.network.neutron [-] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.959873] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23235ec9-5126-4232-b7bb-62103759095b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.973638] env[61648]: DEBUG nova.compute.provider_tree [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.976018] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.108086] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.257739] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Releasing lock "refresh_cache-6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.260144] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 806.260343] env[61648]: DEBUG nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.260515] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.282383] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.460209] env[61648]: INFO nova.compute.manager [-] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Took 1.04 seconds to deallocate network for instance. 
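The Acquiring / Acquired / Releasing lock "refresh_cache-&lt;uuid&gt;" entries come from oslo.concurrency's lockutils, which serialises network-info cache refreshes per instance while letting different instances proceed in parallel. A minimal sketch of that pattern using the public lockutils.lock() context manager; the cache dict and fetch callable below are illustrative, not Nova's actual instance_info_cache:

    from oslo_concurrency import lockutils

    _CACHE = {}  # illustrative per-instance network info cache


    def refresh_network_info(instance_uuid, fetch_ports):
        # One named lock per instance mirrors the "refresh_cache-<uuid>"
        # entries above: concurrent refreshes for the same instance queue up
        # behind each other instead of racing on the cache.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            network_info = fetch_ports(instance_uuid)  # e.g. a Neutron listing
            _CACHE[instance_uuid] = network_info       # [] when no ports remain
            return network_info


    # Example: after a failed port binding there are no usable ports left,
    # which is why the log keeps recording "network_info: []".
    print(refresh_network_info("6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c",
                               lambda uuid: []))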
[ 806.462591] env[61648]: DEBUG nova.compute.claims [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 806.462770] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.479520] env[61648]: DEBUG nova.scheduler.client.report [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.615015] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.615015] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.615015] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.615015] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3f16f45-8a79-49f0-b94f-80ac3ac6ad18 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.629286] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4668d9e2-de35-4e3b-aa48-93e0668777e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.656083] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance af5e6f7b-7c21-44d1-a05c-0d34f59c0065 could not be found. 
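The "Inventory has not changed for provider ... based on inventory data" entries show what this compute node reports to Placement. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, so the figures above work out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A short check of that arithmetic; the inventory dict is copied from the log and the formula is the standard Placement capacity calculation:

    # Inventory as logged for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement treats (total - reserved) * allocation_ratio as the
        # capacity available for allocations of this resource class.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0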
[ 806.656083] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.656083] env[61648]: INFO nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Took 0.04 seconds to destroy the instance on the hypervisor. [ 806.656392] env[61648]: DEBUG oslo.service.loopingcall [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.657098] env[61648]: DEBUG nova.compute.manager [-] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.657268] env[61648]: DEBUG nova.network.neutron [-] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.688995] env[61648]: DEBUG nova.network.neutron [-] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.784687] env[61648]: DEBUG nova.network.neutron [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.986977] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.923s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.988093] env[61648]: ERROR nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. 
[ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Traceback (most recent call last): [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.driver.spawn(context, instance, image_meta, [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] vm_ref = self.build_virtual_machine(instance, [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.988093] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] for vif in network_info: [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return self._sync_wrapper(fn, *args, **kwargs) [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.wait() [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self[:] = self._gt.wait() [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return self._exit_event.wait() [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] current.throw(*self._exc) [ 806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
806.988410] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] result = function(*args, **kwargs) [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] return func(*args, **kwargs) [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise e [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] nwinfo = self.network_api.allocate_for_instance( [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] created_port_ids = self._update_ports_for_instance( [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] with excutils.save_and_reraise_exception(): [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] self.force_reraise() [ 806.988746] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise self.value [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] updated_port = self._update_port( [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] _ensure_no_port_binding_failure(port) [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] raise exception.PortBindingFailed(port_id=port['id']) [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] nova.exception.PortBindingFailed: Binding failed for 
port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. [ 806.989131] env[61648]: ERROR nova.compute.manager [instance: ffb6b3e0-5602-4c28-958d-22265337e236] [ 806.990651] env[61648]: DEBUG nova.compute.utils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 806.991973] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.538s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.994962] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Build of instance ffb6b3e0-5602-4c28-958d-22265337e236 was re-scheduled: Binding failed for port d98545f7-8d23-473e-8881-42e0cb6de758, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 806.995478] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 806.996228] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquiring lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.996482] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Acquired lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.996678] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.142713] env[61648]: DEBUG nova.compute.manager [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Received event network-changed-bb40f15f-6e56-469d-b57a-dbc51e99fd26 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.142912] env[61648]: DEBUG nova.compute.manager [req-dcb71051-5765-446c-a037-f3c298a3593a 
req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Refreshing instance network info cache due to event network-changed-bb40f15f-6e56-469d-b57a-dbc51e99fd26. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 807.143149] env[61648]: DEBUG oslo_concurrency.lockutils [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] Acquiring lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.143295] env[61648]: DEBUG oslo_concurrency.lockutils [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] Acquired lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.143455] env[61648]: DEBUG nova.network.neutron [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Refreshing network info cache for port bb40f15f-6e56-469d-b57a-dbc51e99fd26 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.191906] env[61648]: DEBUG nova.network.neutron [-] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.287335] env[61648]: INFO nova.compute.manager [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] [instance: 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c] Took 1.03 seconds to deallocate network for instance. [ 807.531542] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.672403] env[61648]: DEBUG nova.network.neutron [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.695065] env[61648]: INFO nova.compute.manager [-] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Took 1.04 seconds to deallocate network for instance. 
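The req-dcb71051.../req-7872125d... entries are Nova handling external events from Neutron: network-changed and, shortly after, network-vif-deleted for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, each of which triggers a refresh of the instance's network info cache under the same refresh_cache lock. Neutron delivers these through Nova's os-server-external-events API; the sketch below shows what such a notification looks like as I understand that API's payload shape. The endpoint URL and token are placeholders, and requests is used purely for illustration (Neutron's notifier actually goes through its Nova client with a service token):

    import requests

    NOVA_API = "http://controller:8774/v2.1"   # placeholder endpoint
    TOKEN = "<service-token>"                  # placeholder credential

    event = {
        "events": [{
            "name": "network-vif-deleted",
            "server_uuid": "af5e6f7b-7c21-44d1-a05c-0d34f59c0065",
            "tag": "bb40f15f-6e56-469d-b57a-dbc51e99fd26",  # the port id
        }]
    }

    resp = requests.post(
        NOVA_API + "/os-server-external-events",
        json=event,
        headers={"X-Auth-Token": TOKEN},
    )
    print(resp.status_code, resp.text)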
[ 807.697355] env[61648]: DEBUG nova.compute.claims [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.697870] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.713516] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.763860] env[61648]: DEBUG nova.network.neutron [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.826908] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f16444-557e-4b5f-b0c0-07cb01d3c45b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.842804] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c0b0bb-b5ab-407b-b43b-b36b2132829a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.875474] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2489dcd7-6b7a-4924-97aa-559f1df965ea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.883198] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee007917-8cf4-4c4a-b8ff-e6fbc4af9e1f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.901219] env[61648]: DEBUG nova.compute.provider_tree [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.221520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Releasing lock "refresh_cache-ffb6b3e0-5602-4c28-958d-22265337e236" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.221520] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 808.221520] env[61648]: DEBUG nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.221520] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.239994] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.266665] env[61648]: DEBUG oslo_concurrency.lockutils [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] Releasing lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.266665] env[61648]: DEBUG nova.compute.manager [req-dcb71051-5765-446c-a037-f3c298a3593a req-7872125d-e74a-46d6-a7bc-62e8041f66d4 service nova] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Received event network-vif-deleted-bb40f15f-6e56-469d-b57a-dbc51e99fd26 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.329720] env[61648]: INFO nova.scheduler.client.report [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Deleted allocations for instance 6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c [ 808.404623] env[61648]: DEBUG nova.scheduler.client.report [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.676578] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "34dc9640-9b39-4e3b-b8ca-7a29ab760992" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.676803] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 
tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "34dc9640-9b39-4e3b-b8ca-7a29ab760992" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.745053] env[61648]: DEBUG nova.network.neutron [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.837916] env[61648]: DEBUG oslo_concurrency.lockutils [None req-513569e8-2769-42dc-9601-0e1d6d5032db tempest-ServersTestMultiNic-232361084 tempest-ServersTestMultiNic-232361084-project-member] Lock "6e195d07-d6e0-4d2a-9eb8-ed59c36bfc9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 171.017s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.913454] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.920s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.913454] env[61648]: ERROR nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. 
[ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Traceback (most recent call last): [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.driver.spawn(context, instance, image_meta, [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self._vmops.spawn(context, instance, image_meta, injected_files, [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 808.913454] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] vm_ref = self.build_virtual_machine(instance, [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] vif_infos = vmwarevif.get_vif_info(self._session, [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] for vif in network_info: [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return self._sync_wrapper(fn, *args, **kwargs) [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.wait() [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self[:] = self._gt.wait() [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return self._exit_event.wait() [ 808.913786] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] current.throw(*self._exc) [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] result = function(*args, **kwargs) [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] return func(*args, **kwargs) [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise e [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] nwinfo = self.network_api.allocate_for_instance( [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] created_port_ids = self._update_ports_for_instance( [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 808.914160] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] with excutils.save_and_reraise_exception(): [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] self.force_reraise() [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise self.value [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] updated_port = self._update_port( [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] _ensure_no_port_binding_failure(port) [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] raise exception.PortBindingFailed(port_id=port['id']) [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] nova.exception.PortBindingFailed: Binding failed for 
port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. [ 808.914514] env[61648]: ERROR nova.compute.manager [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] [ 808.914900] env[61648]: DEBUG nova.compute.utils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 808.915009] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.006s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.919343] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Build of instance 6d9ab9ac-d892-47e7-9b86-a2dce40a4568 was re-scheduled: Binding failed for port 89a6b845-6769-4c0a-bfb3-67030d4c6af3, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 808.919855] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 808.920093] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.920402] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.920606] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.248206] env[61648]: INFO nova.compute.manager [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] [instance: ffb6b3e0-5602-4c28-958d-22265337e236] Took 1.03 seconds to deallocate network for instance. 
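Every PortBindingFailed traceback above bottoms out in the same check: after Neutron is asked to bind the port to this host, Nova inspects the returned port and raises if the binding came back failed, which aborts the spawn and, as the "was re-scheduled" entries show, sends the build back to the scheduler. A minimal sketch of that check; the 'binding:vif_type' field and the 'binding_failed' value match what Neutron reports for a failed binding, while the exception class is a simplified stand-in for nova.exception.PortBindingFailed:

    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron flags a failed binding by setting binding:vif_type to
        # "binding_failed"; treat that as a hard error so the compute manager
        # can tear the instance down and re-schedule the build.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Example: a port whose binding failed on the Neutron side.
    port = {'id': '89a6b845-6769-4c0a-bfb3-67030d4c6af3',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)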
[ 809.345021] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 809.448190] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.552535] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.787025] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea823f1-e3d4-4e80-8047-4c89628df4de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.797258] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d9376b-b010-44ee-9eab-166c054d4893 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.833764] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f3ff3a-2c11-4d0b-81ea-4361f0988b55 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.841713] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fc2eda-3bde-4a45-a4af-e2626aa6f5e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.861017] env[61648]: DEBUG nova.compute.provider_tree [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.874487] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.059136] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-6d9ab9ac-d892-47e7-9b86-a2dce40a4568" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.059136] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 
tempest-ServerDiskConfigTestJSON-417367025-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 810.059136] env[61648]: DEBUG nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.059558] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.244717] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.295388] env[61648]: INFO nova.scheduler.client.report [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Deleted allocations for instance ffb6b3e0-5602-4c28-958d-22265337e236 [ 810.363315] env[61648]: DEBUG nova.scheduler.client.report [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.748449] env[61648]: DEBUG nova.network.neutron [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.808020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-15e095dc-6023-4d12-93ee-777d35335987 tempest-ImagesOneServerTestJSON-309802118 tempest-ImagesOneServerTestJSON-309802118-project-member] Lock "ffb6b3e0-5602-4c28-958d-22265337e236" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 168.242s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.872373] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.956s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.873257] env[61648]: ERROR nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Traceback (most recent call last): [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.driver.spawn(context, instance, image_meta, [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] vm_ref = self.build_virtual_machine(instance, [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.873257] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] for vif in network_info: [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return self._sync_wrapper(fn, *args, **kwargs) [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.wait() [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self[:] = self._gt.wait() [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 810.873537] env[61648]: ERROR 
nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return self._exit_event.wait() [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] current.throw(*self._exc) [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.873537] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] result = function(*args, **kwargs) [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] return func(*args, **kwargs) [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise e [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] nwinfo = self.network_api.allocate_for_instance( [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] created_port_ids = self._update_ports_for_instance( [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] with excutils.save_and_reraise_exception(): [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] self.force_reraise() [ 810.873829] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise self.value [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] updated_port = self._update_port( [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.874185] 
env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] _ensure_no_port_binding_failure(port) [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] raise exception.PortBindingFailed(port_id=port['id']) [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] nova.exception.PortBindingFailed: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. [ 810.874185] env[61648]: ERROR nova.compute.manager [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] [ 810.874958] env[61648]: DEBUG nova.compute.utils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 810.876748] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.492s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.888121] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Build of instance 26534fec-8cf8-4cdd-a91f-e63afabc0d57 was re-scheduled: Binding failed for port a5f2ac34-743c-493f-88cd-b1a4f5166a12, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 810.888121] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 810.888121] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquiring lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.888121] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Acquired lock "refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.888356] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.252736] env[61648]: INFO nova.compute.manager [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 6d9ab9ac-d892-47e7-9b86-a2dce40a4568] Took 1.19 seconds to deallocate network for instance. [ 811.312473] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.421177] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.432357] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.432357] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.580561] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.732207] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273f75ab-d0c4-437c-bb7b-cf36815bdded {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.740402] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadf8f10-8c86-44b6-b78e-f35a303528e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.772083] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4bd740-da33-4ad6-bfe7-67ff98c5b988 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.779425] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d800ec7-6568-4ecd-b5cf-de5c71d5d197 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.792738] env[61648]: DEBUG nova.compute.provider_tree [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.835087] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.083707] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Releasing lock 
"refresh_cache-26534fec-8cf8-4cdd-a91f-e63afabc0d57" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.083707] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 812.083707] env[61648]: DEBUG nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.084072] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 812.101370] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.296254] env[61648]: DEBUG nova.scheduler.client.report [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 812.326976] env[61648]: INFO nova.scheduler.client.report [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Deleted allocations for instance 6d9ab9ac-d892-47e7-9b86-a2dce40a4568 [ 812.604725] env[61648]: DEBUG nova.network.neutron [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.808946] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.933s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.809634] env[61648]: ERROR 
nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Traceback (most recent call last): [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.driver.spawn(context, instance, image_meta, [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] vm_ref = self.build_virtual_machine(instance, [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.809634] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] for vif in network_info: [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return self._sync_wrapper(fn, *args, **kwargs) [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.wait() [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self[:] = self._gt.wait() [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return self._exit_event.wait() [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] current.throw(*self._exc) [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 812.809973] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] result = function(*args, **kwargs) [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] return func(*args, **kwargs) [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise e [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] nwinfo = self.network_api.allocate_for_instance( [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] created_port_ids = self._update_ports_for_instance( [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] with excutils.save_and_reraise_exception(): [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] self.force_reraise() [ 812.810356] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise self.value [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] updated_port = self._update_port( [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] _ensure_no_port_binding_failure(port) [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: 
d4b580e9-aae2-4c14-abd8-c6a08f0a576c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] raise exception.PortBindingFailed(port_id=port['id']) [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] nova.exception.PortBindingFailed: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. [ 812.810696] env[61648]: ERROR nova.compute.manager [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] [ 812.810696] env[61648]: DEBUG nova.compute.utils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.815876] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.043s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.824024] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Build of instance d4b580e9-aae2-4c14-abd8-c6a08f0a576c was re-scheduled: Binding failed for port 0d0c85c4-7e97-4a61-9c8b-254ffd11fba9, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 812.824024] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 812.824024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.824024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.824305] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.840719] env[61648]: DEBUG oslo_concurrency.lockutils [None req-bb493430-bd95-4875-8ee5-f66071bb4c3b tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "6d9ab9ac-d892-47e7-9b86-a2dce40a4568" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 168.142s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.110408] env[61648]: INFO nova.compute.manager [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] [instance: 26534fec-8cf8-4cdd-a91f-e63afabc0d57] Took 1.03 seconds to deallocate network for instance. [ 813.347646] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 813.355025] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.467415] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.613349] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c78858-854c-437c-8b91-50f6ddcb2a33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.625695] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a115886a-fee1-4f2b-b843-dffd8081a133 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.658358] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e54ea5-c7d7-42ec-b3fc-62126e0daba8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.666149] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1548fa4-d594-4fc6-a94e-fa4203432b42 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.682206] env[61648]: DEBUG nova.compute.provider_tree [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.871688] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.974729] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-d4b580e9-aae2-4c14-abd8-c6a08f0a576c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.974729] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 813.974729] env[61648]: DEBUG nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 813.974729] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.995947] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.154210] env[61648]: INFO nova.scheduler.client.report [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Deleted allocations for instance 26534fec-8cf8-4cdd-a91f-e63afabc0d57 [ 814.185253] env[61648]: DEBUG nova.scheduler.client.report [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 814.503065] env[61648]: DEBUG nova.network.neutron [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.660388] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d5fd98d-6b05-47a9-bebe-f4966af99884 tempest-ServerActionsTestJSON-1976262008 tempest-ServerActionsTestJSON-1976262008-project-member] Lock "26534fec-8cf8-4cdd-a91f-e63afabc0d57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 166.099s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.682776] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "03f59be3-f1bb-4e3a-96ea-7b39de515397" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.688543] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "03f59be3-f1bb-4e3a-96ea-7b39de515397" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.693018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.875s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.693018] env[61648]: ERROR nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Traceback (most recent call last): [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.driver.spawn(context, instance, image_meta, [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 814.693018] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] vm_ref = self.build_virtual_machine(instance, [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] for vif in network_info: [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return self._sync_wrapper(fn, 
*args, **kwargs) [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.wait() [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self[:] = self._gt.wait() [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return self._exit_event.wait() [ 814.693349] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] current.throw(*self._exc) [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] result = function(*args, **kwargs) [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] return func(*args, **kwargs) [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise e [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] nwinfo = self.network_api.allocate_for_instance( [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] created_port_ids = self._update_ports_for_instance( [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 814.693649] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] with excutils.save_and_reraise_exception(): [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] self.force_reraise() [ 814.693941] 
env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise self.value [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] updated_port = self._update_port( [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] _ensure_no_port_binding_failure(port) [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] raise exception.PortBindingFailed(port_id=port['id']) [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] nova.exception.PortBindingFailed: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. [ 814.693941] env[61648]: ERROR nova.compute.manager [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] [ 814.694232] env[61648]: DEBUG nova.compute.utils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 814.696977] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.527s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.696977] env[61648]: INFO nova.compute.claims [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.699206] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Build of instance 423b5f66-624b-49fe-9f65-9bd3318917c4 was re-scheduled: Binding failed for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 814.699784] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 814.700149] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquiring lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.700431] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Acquired lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.700714] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.009865] env[61648]: INFO nova.compute.manager [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d4b580e9-aae2-4c14-abd8-c6a08f0a576c] Took 1.03 seconds to deallocate network for instance. [ 815.162514] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 815.225362] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.338636] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.688282] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.844982] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Releasing lock "refresh_cache-423b5f66-624b-49fe-9f65-9bd3318917c4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.844982] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 815.844982] env[61648]: DEBUG nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 815.844982] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.871340] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.015016] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca14b7f-46a8-414a-a622-e45554bac202 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.022667] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d5d80f-b9d1-4bdf-9da8-4f3a955c302f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.053283] env[61648]: INFO nova.scheduler.client.report [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Deleted allocations for instance d4b580e9-aae2-4c14-abd8-c6a08f0a576c [ 816.060333] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3efc3f3-b16e-4826-98b3-b3ab341df322 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.070831] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417249a1-5761-471a-a2b4-3a58a8419f8b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.087351] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.376032] env[61648]: DEBUG nova.network.neutron [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.566123] env[61648]: DEBUG oslo_concurrency.lockutils [None req-277c8687-24a9-4ae7-98e0-7d74c14bd2dd tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "d4b580e9-aae2-4c14-abd8-c6a08f0a576c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.505s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.591115] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 816.876927] env[61648]: INFO 
nova.compute.manager [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] [instance: 423b5f66-624b-49fe-9f65-9bd3318917c4] Took 1.03 seconds to deallocate network for instance. [ 817.069432] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.096363] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.100020] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 817.104156] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.937s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.104156] env[61648]: INFO nova.compute.claims [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.596367] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.609019] env[61648]: DEBUG nova.compute.utils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.612259] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 817.612449] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.711631] env[61648]: DEBUG nova.policy [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c1c0b043d1b4a14b9476616fa2724c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9bc809e2dff4fc5ad3e326ba334c4c6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 817.917581] env[61648]: INFO nova.scheduler.client.report [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Deleted allocations for instance 423b5f66-624b-49fe-9f65-9bd3318917c4 [ 818.067109] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Successfully created port: 5439f0ef-c438-4b90-8975-09a2c99f1dbb {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.117795] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 818.421990] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a1be26-195b-410e-b796-06326db236c8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.425501] env[61648]: DEBUG oslo_concurrency.lockutils [None req-eca7c835-bd4f-46e7-b148-8e79b93cce4f tempest-ServerMetadataNegativeTestJSON-1093055171 tempest-ServerMetadataNegativeTestJSON-1093055171-project-member] Lock "423b5f66-624b-49fe-9f65-9bd3318917c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.235s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.429967] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2859aa63-6d4c-4e57-b7f0-08034694608a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.462621] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcf3b2c-0b78-4c52-a11e-59b635af63dd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.471451] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700a5ee1-7ce3-442a-b14e-afb4560757ab {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.490596] env[61648]: DEBUG nova.compute.provider_tree [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.929895] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 818.997179] env[61648]: DEBUG nova.scheduler.client.report [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.127334] env[61648]: DEBUG nova.compute.manager [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Received event network-changed-5439f0ef-c438-4b90-8975-09a2c99f1dbb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 819.127334] env[61648]: DEBUG nova.compute.manager [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Refreshing instance network info cache due to event network-changed-5439f0ef-c438-4b90-8975-09a2c99f1dbb. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 819.127334] env[61648]: DEBUG oslo_concurrency.lockutils [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] Acquiring lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.127334] env[61648]: DEBUG oslo_concurrency.lockutils [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] Acquired lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.127746] env[61648]: DEBUG nova.network.neutron [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Refreshing network info cache for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 819.137072] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 819.173491] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 819.174281] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 819.174577] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.175117] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 819.175254] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.177028] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 819.177028] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 819.177028] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 819.177028] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 819.177028] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 819.177247] env[61648]: DEBUG nova.virt.hardware [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.177753] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b0a659-799c-4878-a04f-29eb9e29bf3c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.185944] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aace4351-0e3d-4085-abc8-66d4d5a4cc42 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.360091] env[61648]: ERROR nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
[ 819.360091] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.360091] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 819.360091] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 819.360091] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.360091] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.360091] env[61648]: ERROR nova.compute.manager raise self.value [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 819.360091] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 819.360091] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.360091] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 819.360529] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.360529] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 819.360529] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
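The traceback above, like the earlier one for port 90b8b3c4-9afe-4d0c-a854-9a4b52282293, walks the same three frames every time: _update_ports_for_instance wrapping the failure in oslo_utils' save_and_reraise_exception, _update_port calling _ensure_no_port_binding_failure, and that helper raising PortBindingFailed. The short Python sketch below only illustrates that pattern and is not the actual Nova source; the binding check on binding:vif_type, the stand-in exception class and the bare neutron client parameter are assumptions added for the example.

    # Illustrative sketch of the error-handling pattern shown in the traceback.
    # Function names follow the frames in the log; the binding:vif_type check,
    # the PortBindingFailed stand-in and the 'neutron' client object are
    # assumptions for this example, not the real Nova implementation.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed; turning
        # that into an exception forces the build to fail instead of spawning
        # a VM with an unusable VIF.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_port(neutron, port_id, updates):
        # 'neutron' stands in for any client whose update_port() returns
        # {'port': {...}}, as the python-neutronclient API does.
        port = neutron.update_port(port_id, updates)['port']
        _ensure_no_port_binding_failure(port)
        return port


    def _update_ports_for_instance(neutron, port_ids, updates):
        updated = []
        for port_id in port_ids:
            try:
                updated.append(_update_port(neutron, port_id, updates))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup of already-created ports would happen here; the
                    # original exception is then re-raised on __exit__ via
                    # force_reraise(), which is the "raise self.value" frame.
                    pass
        return updated

The visible effect in this log is that the failure surfaces as PortBindingFailed inside _allocate_network_async, the spawn is aborted and, as with instances 423b5f66 and 3ca295b7, the build is re-scheduled rather than retried in place.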
[ 819.360529] env[61648]: ERROR nova.compute.manager [ 819.360529] env[61648]: Traceback (most recent call last): [ 819.360529] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 819.360529] env[61648]: listener.cb(fileno) [ 819.360529] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 819.360529] env[61648]: result = function(*args, **kwargs) [ 819.360529] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 819.360529] env[61648]: return func(*args, **kwargs) [ 819.360529] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.360529] env[61648]: raise e [ 819.360529] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.360529] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 819.360529] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 819.360529] env[61648]: created_port_ids = self._update_ports_for_instance( [ 819.360529] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 819.360529] env[61648]: with excutils.save_and_reraise_exception(): [ 819.360529] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.360529] env[61648]: self.force_reraise() [ 819.360529] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.360529] env[61648]: raise self.value [ 819.360529] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 819.360529] env[61648]: updated_port = self._update_port( [ 819.360529] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.360529] env[61648]: _ensure_no_port_binding_failure(port) [ 819.360529] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.360529] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 819.361198] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. [ 819.361198] env[61648]: Removing descriptor: 19 [ 819.361198] env[61648]: ERROR nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
[ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Traceback (most recent call last): [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] yield resources [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.driver.spawn(context, instance, image_meta, [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.361198] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] vm_ref = self.build_virtual_machine(instance, [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] for vif in network_info: [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self._sync_wrapper(fn, *args, **kwargs) [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.wait() [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self[:] = self._gt.wait() [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self._exit_event.wait() [ 819.361471] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 819.361769] env[61648]: ERROR 
nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] result = hub.switch() [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self.greenlet.switch() [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] result = function(*args, **kwargs) [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return func(*args, **kwargs) [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise e [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] nwinfo = self.network_api.allocate_for_instance( [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 819.361769] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] created_port_ids = self._update_ports_for_instance( [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] with excutils.save_and_reraise_exception(): [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.force_reraise() [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise self.value [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] updated_port = self._update_port( [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.362075] 
env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] _ensure_no_port_binding_failure(port) [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.362075] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise exception.PortBindingFailed(port_id=port['id']) [ 819.362345] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. [ 819.362345] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] [ 819.362345] env[61648]: INFO nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Terminating instance [ 819.365408] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquiring lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.412754] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "d37aad4c-f4e9-40ab-a250-5dd3924f305c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.413105] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "d37aad4c-f4e9-40ab-a250-5dd3924f305c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.462867] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.499780] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.500336] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 
9cc301f6-45de-43b9-a88d-d94e3f00cff3] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 819.503249] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.040s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.657283] env[61648]: DEBUG nova.network.neutron [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.744465] env[61648]: DEBUG nova.network.neutron [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.008356] env[61648]: DEBUG nova.compute.utils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.016359] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 820.016359] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.100076] env[61648]: DEBUG nova.policy [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90bed90f8831484c9c7cbfc4a0bb2338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b12898ebf9d443b9e32fd368e8c81c8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 820.247009] env[61648]: DEBUG oslo_concurrency.lockutils [req-e89246d9-2d7f-4b59-8e64-bb474bfd7270 req-0a78c721-fba7-49cf-aecd-ccf122a90cdf service nova] Releasing lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.249914] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquired lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.250110] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.315348] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d37faa7-b757-46c1-903a-e19cc6096a73 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.322773] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc91ce2-73f3-49aa-a9a9-34d7ae21e739 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.354034] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7719d0-a9be-471a-b1f8-2245cbdab17d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.360853] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8168d712-d6fb-436a-a186-45995ab90008 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.373657] env[61648]: DEBUG nova.compute.provider_tree [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 
tempest-ServersTestJSON-1335775600-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.514704] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 820.591135] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Successfully created port: a40852a0-e0c6-4e0d-bc2f-d03faa3952b2 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.774875] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.869476] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.879501] env[61648]: DEBUG nova.scheduler.client.report [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.170673] env[61648]: DEBUG nova.compute.manager [req-703764cf-fb66-4cca-9a96-15a60607fdd7 req-4ba9ff23-f61f-4942-8335-3997774feac0 service nova] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Received event network-vif-deleted-5439f0ef-c438-4b90-8975-09a2c99f1dbb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 821.375797] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Releasing lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.376566] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: 
c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 821.376566] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.376844] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e487afd-4c50-465c-a7ee-315324dd21cf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.384833] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.385581] env[61648]: ERROR nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Traceback (most recent call last): [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.driver.spawn(context, instance, image_meta, [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] vm_ref = self.build_virtual_machine(instance, [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.385581] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] for vif in network_info: [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self._sync_wrapper(fn, *args, **kwargs) [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.wait() [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self[:] = self._gt.wait() [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self._exit_event.wait() [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] result = hub.switch() [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 821.386111] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return self.greenlet.switch() [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] result = function(*args, **kwargs) [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] return func(*args, **kwargs) [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise e [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] nwinfo = self.network_api.allocate_for_instance( [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] created_port_ids = self._update_ports_for_instance( [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] with excutils.save_and_reraise_exception(): [ 821.386675] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] self.force_reraise() [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise self.value [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] updated_port = self._update_port( [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] _ensure_no_port_binding_failure(port) [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] raise exception.PortBindingFailed(port_id=port['id']) [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] nova.exception.PortBindingFailed: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. [ 821.387129] env[61648]: ERROR nova.compute.manager [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] [ 821.387755] env[61648]: DEBUG nova.compute.utils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. 
{{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 821.390554] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.690s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.397429] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f454c1-ac12-474f-aa5f-a143fb09f628 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.412629] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Build of instance 3ca295b7-50e2-4b6b-8033-991328a43f3e was re-scheduled: Binding failed for port b5c57279-8a1d-48d6-91a1-5da524713010, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 821.417370] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 821.417370] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquiring lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.417370] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Acquired lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.417370] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.427247] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c633de1e-3dfb-4304-ac9f-d8f4a2b725d1 could not be found. 
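The WARNING just above, followed on the next lines by "Instance destroyed", "Took 0.05 seconds to destroy the instance on the hypervisor" and the network deallocation records, shows the teardown tolerating a VM that was never actually created on vCenter: the failed build is cleaned up even though the backend has nothing to delete. A minimal sketch of that shape, with InstanceNotFound and the helper callables as stand-ins rather than the real nova.virt.vmwareapi code:

    # Minimal sketch of a destroy path that tolerates a missing backend VM,
    # mirroring the WARNING -> "Instance destroyed" -> deallocation sequence.
    # InstanceNotFound and the three callables are stand-ins for illustration.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        """Stand-in for the InstanceNotFound error named in the log."""


    def destroy_instance(instance_uuid, find_vm_ref, destroy_vm,
                         deallocate_network):
        # find_vm_ref/destroy_vm/deallocate_network are caller-supplied
        # stand-ins for the backend lookup (SearchIndex.FindAllByUuid in the
        # log), the VM teardown, and the Neutron deallocation step.
        try:
            vm_ref = find_vm_ref(instance_uuid)
            destroy_vm(vm_ref)
        except InstanceNotFound:
            # The build failed before a VM was created on vCenter, so there is
            # nothing to delete; warn and keep going, as the WARNING above does.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        # Ports and allocations are released regardless of backend state.
        deallocate_network(instance_uuid)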
[ 821.428685] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.428685] env[61648]: INFO nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 821.428685] env[61648]: DEBUG oslo.service.loopingcall [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.428685] env[61648]: DEBUG nova.compute.manager [-] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 821.428685] env[61648]: DEBUG nova.network.neutron [-] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.449207] env[61648]: DEBUG nova.network.neutron [-] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.526694] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 821.557647] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 821.557931] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 821.558107] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.558301] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 821.558455] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.558603] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 821.558783] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 821.558938] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 821.559113] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 821.559268] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 821.559465] env[61648]: DEBUG nova.virt.hardware [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.560329] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8201f452-77a1-4a9c-a4c9-4b651b367ee9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.569099] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a100e85-0aea-413c-a1e4-53dfd02f3990 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.683307] env[61648]: ERROR nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. 
[ 821.683307] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.683307] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.683307] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.683307] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.683307] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.683307] env[61648]: ERROR nova.compute.manager raise self.value [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.683307] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 821.683307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.683307] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 821.683910] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.683910] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 821.683910] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. 
[ 821.683910] env[61648]: ERROR nova.compute.manager [ 821.683910] env[61648]: Traceback (most recent call last): [ 821.683910] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 821.683910] env[61648]: listener.cb(fileno) [ 821.683910] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.683910] env[61648]: result = function(*args, **kwargs) [ 821.683910] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.683910] env[61648]: return func(*args, **kwargs) [ 821.683910] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.683910] env[61648]: raise e [ 821.683910] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.683910] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 821.683910] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.683910] env[61648]: created_port_ids = self._update_ports_for_instance( [ 821.683910] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.683910] env[61648]: with excutils.save_and_reraise_exception(): [ 821.683910] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.683910] env[61648]: self.force_reraise() [ 821.683910] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.683910] env[61648]: raise self.value [ 821.683910] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.683910] env[61648]: updated_port = self._update_port( [ 821.683910] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.683910] env[61648]: _ensure_no_port_binding_failure(port) [ 821.683910] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.683910] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 821.685380] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. [ 821.685380] env[61648]: Removing descriptor: 19 [ 821.685380] env[61648]: ERROR nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. 
[ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Traceback (most recent call last): [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] yield resources [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.driver.spawn(context, instance, image_meta, [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.685380] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] vm_ref = self.build_virtual_machine(instance, [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] for vif in network_info: [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self._sync_wrapper(fn, *args, **kwargs) [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.wait() [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self[:] = self._gt.wait() [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self._exit_event.wait() [ 821.685820] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 821.686351] env[61648]: ERROR 
nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] result = hub.switch() [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self.greenlet.switch() [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] result = function(*args, **kwargs) [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return func(*args, **kwargs) [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise e [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] nwinfo = self.network_api.allocate_for_instance( [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.686351] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] created_port_ids = self._update_ports_for_instance( [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] with excutils.save_and_reraise_exception(): [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.force_reraise() [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise self.value [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] updated_port = self._update_port( [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.686857] 
env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] _ensure_no_port_binding_failure(port) [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.686857] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise exception.PortBindingFailed(port_id=port['id']) [ 821.687335] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. [ 821.687335] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] [ 821.687335] env[61648]: INFO nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Terminating instance [ 821.687335] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquiring lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.687335] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquired lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.687335] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.939517] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.955979] env[61648]: DEBUG nova.network.neutron [-] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.050927] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.196531] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddafda6-b0dc-4067-9dd9-71db90730738 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.208282] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d70964-8e10-4eda-9fd5-34b0133e0ee6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.242147] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63d4e85-5780-40b9-8be5-a3864e545851 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.250399] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c73d61d-b386-4aae-8e50-fd12aa907e8b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.263362] env[61648]: DEBUG nova.compute.provider_tree [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.292280] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.400614] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.454320] env[61648]: INFO nova.compute.manager [-] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Took 1.03 seconds to deallocate network for instance. 
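The PortBindingFailed tracebacks above all pass through the same oslo.utils construct: save_and_reraise_exception() lets the Neutron API layer run cleanup after a failed port update and then re-raise the original exception. A minimal, self-contained sketch of that pattern (the exception class and helper below are simplified stand-ins consistent with the traceback, not Nova's code):

from oslo_utils import excutils

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

def _ensure_no_port_binding_failure(port):
    # Raise if Neutron reported a failed binding for this port.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])

def update_ports(ports):
    for port in ports:
        try:
            _ensure_no_port_binding_failure(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup goes here; the saved exception is re-raised
                # automatically when the with-block exits.
                print('cleaning up port %s' % port['id'])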
[ 822.456464] env[61648]: DEBUG nova.compute.claims [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 822.456641] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.555504] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Releasing lock "refresh_cache-3ca295b7-50e2-4b6b-8033-991328a43f3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.555756] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 822.557948] env[61648]: DEBUG nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.557948] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.573735] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.767937] env[61648]: DEBUG nova.scheduler.client.report [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.903654] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Releasing lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.907106] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 822.907106] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.907106] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-024cea1a-846c-4155-b364-3431a74bbd7d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.914039] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a2181a-e38d-453a-8bb7-5ea16f4a42d4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.940277] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cc301f6-45de-43b9-a88d-d94e3f00cff3 could not be found. 
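The inventory dictionary logged above is what the resource tracker reports to Placement; schedulable capacity per resource class follows from (total - reserved) * allocation_ratio. A quick worked illustration using the logged values (arithmetic sketch only, not Nova or Placement code):

# Inventory as reported for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: %g units schedulable' % (rc, capacity))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400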
[ 822.940530] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.940717] env[61648]: INFO nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 822.940963] env[61648]: DEBUG oslo.service.loopingcall [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.941204] env[61648]: DEBUG nova.compute.manager [-] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.941298] env[61648]: DEBUG nova.network.neutron [-] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.957090] env[61648]: DEBUG nova.network.neutron [-] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.075433] env[61648]: DEBUG nova.network.neutron [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.199395] env[61648]: DEBUG nova.compute.manager [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Received event network-changed-a40852a0-e0c6-4e0d-bc2f-d03faa3952b2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.199668] env[61648]: DEBUG nova.compute.manager [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Refreshing instance network info cache due to event network-changed-a40852a0-e0c6-4e0d-bc2f-d03faa3952b2. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 823.199867] env[61648]: DEBUG oslo_concurrency.lockutils [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] Acquiring lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.200026] env[61648]: DEBUG oslo_concurrency.lockutils [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] Acquired lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.200191] env[61648]: DEBUG nova.network.neutron [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Refreshing network info cache for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.274262] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.886s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.274926] env[61648]: ERROR nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. 
[ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Traceback (most recent call last): [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.driver.spawn(context, instance, image_meta, [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self._vmops.spawn(context, instance, image_meta, injected_files, [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] vm_ref = self.build_virtual_machine(instance, [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] vif_infos = vmwarevif.get_vif_info(self._session, [ 823.274926] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] for vif in network_info: [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self._sync_wrapper(fn, *args, **kwargs) [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.wait() [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self[:] = self._gt.wait() [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self._exit_event.wait() [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] result = hub.switch() [ 823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
823.275368] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return self.greenlet.switch() [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] result = function(*args, **kwargs) [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] return func(*args, **kwargs) [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise e [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] nwinfo = self.network_api.allocate_for_instance( [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] created_port_ids = self._update_ports_for_instance( [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] with excutils.save_and_reraise_exception(): [ 823.275993] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] self.force_reraise() [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise self.value [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] updated_port = self._update_port( [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] _ensure_no_port_binding_failure(port) [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] raise exception.PortBindingFailed(port_id=port['id']) [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] nova.exception.PortBindingFailed: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. [ 823.278193] env[61648]: ERROR nova.compute.manager [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] [ 823.278636] env[61648]: DEBUG nova.compute.utils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 823.278636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.403s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.278961] env[61648]: INFO nova.compute.claims [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.283720] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Build of instance af5e6f7b-7c21-44d1-a05c-0d34f59c0065 was re-scheduled: Binding failed for port bb40f15f-6e56-469d-b57a-dbc51e99fd26, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 823.283720] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 823.283720] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.283720] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.283720] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.459798] env[61648]: DEBUG nova.network.neutron [-] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.578310] env[61648]: INFO nova.compute.manager [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] [instance: 3ca295b7-50e2-4b6b-8033-991328a43f3e] Took 1.02 seconds to deallocate network for instance. [ 823.716732] env[61648]: DEBUG nova.network.neutron [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.784560] env[61648]: DEBUG nova.network.neutron [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.804431] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.886082] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.963502] env[61648]: INFO nova.compute.manager [-] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Took 1.02 seconds to deallocate network for instance. [ 823.966794] env[61648]: DEBUG nova.compute.claims [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 823.967541] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.293216] env[61648]: DEBUG oslo_concurrency.lockutils [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] Releasing lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.293726] env[61648]: DEBUG nova.compute.manager [req-00b80209-fa6f-4261-8a36-5942e5262dc9 req-33a90c12-b473-472d-a4dc-a5738f0242ee service nova] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Received event network-vif-deleted-a40852a0-e0c6-4e0d-bc2f-d03faa3952b2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.388571] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-af5e6f7b-7c21-44d1-a05c-0d34f59c0065" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.388841] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 824.389032] env[61648]: DEBUG nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 824.389203] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.421111] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.553254] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc730c3-10ba-4eea-a90f-60939b803d43 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.561637] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2c22b0-2c03-4fd2-b20f-fb9d4c84fab3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.594124] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d800b38a-9ad7-433c-8339-33735436770d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.600904] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644cffe6-2502-4aec-a7cf-35335304e67a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.613848] env[61648]: DEBUG nova.compute.provider_tree [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.615899] env[61648]: INFO nova.scheduler.client.report [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Deleted allocations for instance 3ca295b7-50e2-4b6b-8033-991328a43f3e [ 824.923660] env[61648]: DEBUG nova.network.neutron [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.123346] env[61648]: DEBUG nova.scheduler.client.report [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.127832] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0e54d7b9-5ef4-46c1-8642-0121cb341067 tempest-ServersTestJSON-1335775600 tempest-ServersTestJSON-1335775600-project-member] Lock "3ca295b7-50e2-4b6b-8033-991328a43f3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.443s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.426194] env[61648]: INFO nova.compute.manager [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: af5e6f7b-7c21-44d1-a05c-0d34f59c0065] Took 1.04 seconds to deallocate network for instance. [ 825.630252] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.631354] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 825.634487] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 825.637919] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.803s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.639942] env[61648]: INFO nova.compute.claims [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.145892] env[61648]: DEBUG nova.compute.utils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.154075] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 826.160623] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.181969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.220113] env[61648]: DEBUG nova.policy [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '615251e3d74a45c9a3cc0d4025866bfa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'daeee911ea304b04b902357f7b5578d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 826.459917] env[61648]: INFO nova.scheduler.client.report [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance af5e6f7b-7c21-44d1-a05c-0d34f59c0065 [ 826.534342] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: 
acbab424-c325-4e57-81a2-3d4a1ae4a081] Successfully created port: 430c5d97-bf47-4d4b-a70c-ff574734d735 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.653060] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 826.913555] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67fb536b-650f-443b-9b37-6d498e0ad634 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.923490] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19936219-8f8a-4860-a0ca-bf6b5b798162 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.958116] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f539b9cc-79b1-439e-bab5-f4c724838437 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.964597] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef972030-c161-42c9-b522-a2e8ff046b4f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.979036] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1db1c3e-fcdc-4065-a264-26d89ee06bfa tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "af5e6f7b-7c21-44d1-a05c-0d34f59c0065" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.052s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.979517] env[61648]: DEBUG nova.compute.provider_tree [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.484654] env[61648]: DEBUG nova.scheduler.client.report [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.488338] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.669814] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 827.701871] env[61648]: DEBUG nova.compute.manager [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Received event network-changed-430c5d97-bf47-4d4b-a70c-ff574734d735 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.701871] env[61648]: DEBUG nova.compute.manager [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Refreshing instance network info cache due to event network-changed-430c5d97-bf47-4d4b-a70c-ff574734d735. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.701871] env[61648]: DEBUG oslo_concurrency.lockutils [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] Acquiring lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.701871] env[61648]: DEBUG oslo_concurrency.lockutils [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] Acquired lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.701871] env[61648]: DEBUG nova.network.neutron [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Refreshing network info cache for port 430c5d97-bf47-4d4b-a70c-ff574734d735 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.706153] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.706258] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Flavor limits 0:0:0 {{(pid=61648) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.706468] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.706735] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.707038] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.707157] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.707414] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.707623] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.708056] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.708342] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.708588] env[61648]: DEBUG nova.virt.hardware [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.709889] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a173a4-eb09-49de-859f-a0cfe5d15282 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.720209] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d996373-fca4-4085-872b-812c2223f9d8 
{{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.967140] env[61648]: ERROR nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. [ 827.967140] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.967140] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.967140] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.967140] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.967140] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.967140] env[61648]: ERROR nova.compute.manager raise self.value [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.967140] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 827.967140] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.967140] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 827.967654] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.967654] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 827.967654] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. 
[ 827.967654] env[61648]: ERROR nova.compute.manager [ 827.967654] env[61648]: Traceback (most recent call last): [ 827.967654] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 827.967654] env[61648]: listener.cb(fileno) [ 827.967654] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.967654] env[61648]: result = function(*args, **kwargs) [ 827.967654] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.967654] env[61648]: return func(*args, **kwargs) [ 827.967654] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.967654] env[61648]: raise e [ 827.967654] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.967654] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 827.967654] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.967654] env[61648]: created_port_ids = self._update_ports_for_instance( [ 827.967654] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.967654] env[61648]: with excutils.save_and_reraise_exception(): [ 827.967654] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.967654] env[61648]: self.force_reraise() [ 827.967654] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.967654] env[61648]: raise self.value [ 827.967654] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.967654] env[61648]: updated_port = self._update_port( [ 827.967654] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.967654] env[61648]: _ensure_no_port_binding_failure(port) [ 827.967654] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.967654] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 827.968473] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. [ 827.968473] env[61648]: Removing descriptor: 19 [ 827.968473] env[61648]: ERROR nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. 
[ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Traceback (most recent call last): [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] yield resources [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.driver.spawn(context, instance, image_meta, [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.968473] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] vm_ref = self.build_virtual_machine(instance, [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] for vif in network_info: [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self._sync_wrapper(fn, *args, **kwargs) [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.wait() [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self[:] = self._gt.wait() [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self._exit_event.wait() [ 827.968823] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.970236] env[61648]: ERROR 
nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] result = hub.switch() [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self.greenlet.switch() [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] result = function(*args, **kwargs) [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return func(*args, **kwargs) [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise e [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] nwinfo = self.network_api.allocate_for_instance( [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.970236] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] created_port_ids = self._update_ports_for_instance( [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] with excutils.save_and_reraise_exception(): [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.force_reraise() [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise self.value [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] updated_port = self._update_port( [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.970669] 
env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] _ensure_no_port_binding_failure(port) [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.970669] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise exception.PortBindingFailed(port_id=port['id']) [ 827.970972] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. [ 827.970972] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] [ 827.970972] env[61648]: INFO nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Terminating instance [ 827.971949] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquiring lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.993165] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.993927] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 828.003359] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.132s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.006513] env[61648]: INFO nova.compute.claims [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.027646] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.233691] env[61648]: DEBUG nova.network.neutron [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.348645] env[61648]: DEBUG nova.network.neutron [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.398060] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "c98567aa-d978-4b4d-9e01-25ab70246205" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.398323] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "c98567aa-d978-4b4d-9e01-25ab70246205" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.504969] env[61648]: DEBUG nova.compute.utils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.507273] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 828.507273] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.553345] env[61648]: DEBUG nova.policy [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab0fa921b69462486c5b036eacfd244', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67a5a9eda983402ead3905425a511e01', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 828.830202] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Successfully created port: d69c5b1f-7caa-4eec-b39d-af557f363d10 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.851778] env[61648]: DEBUG oslo_concurrency.lockutils [req-00bc081e-29ae-4faa-b6be-561ca9d0b4b6 req-85dbb91b-f77a-4052-b7a2-7a0af54010dc service nova] Releasing lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.854775] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquired lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.854775] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.014711] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 829.323187] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8f26bd-bcda-4202-895c-1da9e8d1c5f9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.330873] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f97246-9e7c-467b-9673-851260d4d641 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.369626] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff5b85b-c9a8-4dfd-b5da-7331de34d244 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.377084] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baee212a-33ed-4bab-8205-074392f195d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.389769] env[61648]: DEBUG nova.compute.provider_tree [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.394294] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.517630] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.738166] env[61648]: DEBUG nova.compute.manager [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Received event network-vif-deleted-430c5d97-bf47-4d4b-a70c-ff574734d735 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.738392] env[61648]: DEBUG nova.compute.manager [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Received event network-changed-d69c5b1f-7caa-4eec-b39d-af557f363d10 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.738554] env[61648]: DEBUG nova.compute.manager [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Refreshing instance network info cache due to event network-changed-d69c5b1f-7caa-4eec-b39d-af557f363d10. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 829.738865] env[61648]: DEBUG oslo_concurrency.lockutils [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] Acquiring lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.738906] env[61648]: DEBUG oslo_concurrency.lockutils [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] Acquired lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.743340] env[61648]: DEBUG nova.network.neutron [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Refreshing network info cache for port d69c5b1f-7caa-4eec-b39d-af557f363d10 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.849881] env[61648]: ERROR nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. [ 829.849881] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.849881] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.849881] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.849881] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.849881] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.849881] env[61648]: ERROR nova.compute.manager raise self.value [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.849881] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 829.849881] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.849881] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 829.850384] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.850384] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 829.850384] env[61648]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. [ 829.850384] env[61648]: ERROR nova.compute.manager [ 829.850384] env[61648]: Traceback (most recent call last): [ 829.850384] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 829.850384] env[61648]: listener.cb(fileno) [ 829.850384] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 829.850384] env[61648]: result = function(*args, **kwargs) [ 829.850384] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 829.850384] env[61648]: return func(*args, **kwargs) [ 829.850384] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 829.850384] env[61648]: raise e [ 829.850384] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.850384] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 829.850384] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.850384] env[61648]: created_port_ids = self._update_ports_for_instance( [ 829.850384] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.850384] env[61648]: with excutils.save_and_reraise_exception(): [ 829.850384] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.850384] env[61648]: self.force_reraise() [ 829.850384] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.850384] env[61648]: raise self.value [ 829.850384] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.850384] env[61648]: updated_port = self._update_port( [ 829.850384] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.850384] env[61648]: _ensure_no_port_binding_failure(port) [ 829.850384] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.850384] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 829.851342] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. 
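The two PortBindingFailed tracebacks above both bottom out in nova/network/neutron.py:294, where _ensure_no_port_binding_failure turns a failed Neutron port binding into the exception that aborts the build. A minimal, self-contained sketch of that guard follows, reconstructed from the call chain shown in the traceback; the local exception class and the 'binding:vif_type' == 'binding_failed' check are assumptions made for illustration, and only the raise of PortBindingFailed(port_id=port['id']) is taken from the log.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed, for this sketch only."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Assumed check: Neutron marks a binding it could not complete with a
    # failed vif_type; raising here is what makes _build_and_run_instance
    # clean up ("Terminating instance" / "Start destroying the instance"
    # further down in the log).
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


try:
    _ensure_no_port_binding_failure(
        {'id': 'd69c5b1f-7caa-4eec-b39d-af557f363d10',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same wording as the ERROR lines above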
[ 829.851342] env[61648]: Removing descriptor: 19 [ 829.897585] env[61648]: DEBUG nova.scheduler.client.report [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.020367] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Releasing lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.020846] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 830.021058] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.021357] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dc0b979-2e9b-4d71-85b2-75421ecba543 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.028556] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.033246] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca1f5e8-e29b-424e-aafe-58335a42797f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.054357] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance acbab424-c325-4e57-81a2-3d4a1ae4a081 could not be found. 
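The "Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0" report above repeats the provider inventory that every resource claim in this log is checked against. As a rough worked example, and assuming Placement's usual capacity formula (total - reserved) * allocation_ratio with max_unit acting as the per-instance cap (neither is stated in the log itself), the reported figures work out as follows.

# Worked example only; the inventory dict is copied from the report above,
# and the capacity formula is an assumption about how Placement uses it.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 156,
                'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}, per-instance cap {inv['max_unit']}")
# Prints capacities of 192 VCPU, 196078 MB and 400 GB, which is why the
# m1.nano claims above (1 vCPU, 192 MB, 1 GB root disk) succeed immediately.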
[ 830.054577] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.054755] env[61648]: INFO nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Took 0.03 seconds to destroy the instance on the hypervisor. [ 830.054990] env[61648]: DEBUG oslo.service.loopingcall [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.056959] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.057193] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.057346] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.057527] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.057670] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.057823] env[61648]: DEBUG nova.virt.hardware [None 
req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.058040] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.058204] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.058369] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.058530] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.058697] env[61648]: DEBUG nova.virt.hardware [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.058955] env[61648]: DEBUG nova.compute.manager [-] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.059054] env[61648]: DEBUG nova.network.neutron [-] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.061013] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d307f488-b85b-40d4-ac22-4490be366c9c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.068197] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e620f58a-0a44-45c2-960c-192bbff452e7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.087665] env[61648]: ERROR nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron 
logs for more information. [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Traceback (most recent call last): [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] yield resources [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.driver.spawn(context, instance, image_meta, [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] vm_ref = self.build_virtual_machine(instance, [ 830.087665] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] for vif in network_info: [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return self._sync_wrapper(fn, *args, **kwargs) [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.wait() [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self[:] = self._gt.wait() [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return self._exit_event.wait() [ 830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
830.088126] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] current.throw(*self._exc) [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] result = function(*args, **kwargs) [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return func(*args, **kwargs) [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise e [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] nwinfo = self.network_api.allocate_for_instance( [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] created_port_ids = self._update_ports_for_instance( [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] with excutils.save_and_reraise_exception(): [ 830.092433] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.force_reraise() [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise self.value [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] updated_port = self._update_port( [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] _ensure_no_port_binding_failure(port) [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise exception.PortBindingFailed(port_id=port['id']) [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. [ 830.092853] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] [ 830.092853] env[61648]: INFO nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Terminating instance [ 830.093213] env[61648]: DEBUG nova.network.neutron [-] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.095286] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquiring lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.262185] env[61648]: DEBUG nova.network.neutron [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.374819] env[61648]: DEBUG nova.network.neutron [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.402267] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.402802] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.408626] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.721s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.410034] env[61648]: INFO nova.compute.claims [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.596731] env[61648]: DEBUG nova.network.neutron [-] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.883992] env[61648]: DEBUG oslo_concurrency.lockutils [req-04a4a2bb-0390-4995-a162-bfe27c187c3b req-e5c143fd-de16-4941-8aab-9cc43f22c148 service nova] Releasing lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.884453] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquired lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.884642] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.910389] env[61648]: DEBUG nova.compute.utils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.911885] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.916148] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.957595] env[61648]: DEBUG nova.policy [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1640f7eea62d4cecb3957335c02d4e3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acae4fa055d943c4abab9264a1f1683f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.102486] env[61648]: INFO nova.compute.manager [-] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Took 1.04 seconds to deallocate network for instance. [ 831.104842] env[61648]: DEBUG nova.compute.claims [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 831.105042] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.254487] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Successfully created port: e27c0277-ae98-467e-98a7-d62f16c67fcc {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.403926] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.419409] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 831.524381] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.653375] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356bccf5-2dc1-4c57-a2cf-095a17036463 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.660925] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c92459-bd4f-49be-a483-4c04ff3a39a4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.694413] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d3f089-0155-4095-9178-fe625e4de660 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.701598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a0e1c2-225b-40c7-90dc-86ab8bdb8ffe {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.716381] env[61648]: DEBUG nova.compute.provider_tree [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.764274] env[61648]: DEBUG nova.compute.manager [req-5cd54187-22bb-4bf5-84d6-20283597eb53 req-42db0817-2a67-45ce-9189-1b051cd07207 service nova] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Received event network-vif-deleted-d69c5b1f-7caa-4eec-b39d-af557f363d10 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.030840] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Releasing lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.031324] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 832.031594] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.033216] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-817ad4f1-4371-408e-ad9d-054f0811f2e9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.041619] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251e9593-f447-4106-9397-8cad3f01e35b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.064576] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e199f125-9259-4268-9aaf-1f4d10da9a34 could not be found. [ 832.064807] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.064993] env[61648]: INFO nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Took 0.03 seconds to destroy the instance on the hypervisor. [ 832.065251] env[61648]: DEBUG oslo.service.loopingcall [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.065459] env[61648]: DEBUG nova.compute.manager [-] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.065553] env[61648]: DEBUG nova.network.neutron [-] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.085645] env[61648]: DEBUG nova.network.neutron [-] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.219933] env[61648]: DEBUG nova.scheduler.client.report [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.331930] env[61648]: ERROR nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. [ 832.331930] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.331930] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 832.331930] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 832.331930] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.331930] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.331930] env[61648]: ERROR nova.compute.manager raise self.value [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 832.331930] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 832.331930] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.331930] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 832.332620] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.332620] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 832.332620] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. 
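The ERROR block above bottoms out in nova/network/neutron.py:_ensure_no_port_binding_failure, which turns a failed Neutron binding into PortBindingFailed for port e27c0277-ae98-467e-98a7-d62f16c67fcc. A minimal sketch of what that check amounts to, assuming the port dict returned by Neutron carries the standard 'binding:vif_type' field; the helper and exception names follow the traceback, but the field handling shown here is an assumption rather than a copy of the Nova source:

    # Sketch only: after a port update, Neutron reports the binding result in
    # the port's 'binding:vif_type' field. A value of 'binding_failed' means no
    # mechanism driver could bind the port, which is what surfaces as
    # PortBindingFailed in the traceback above.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)


    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns from a port show/update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

The exception message format matches the "Binding failed for port ..., please check neutron logs for more information." text the compute manager logs above, so the root cause to chase is the binding failure on the Neutron side, not anything in the compute traceback itself.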
[ 832.332620] env[61648]: ERROR nova.compute.manager [ 832.332620] env[61648]: Traceback (most recent call last): [ 832.332620] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 832.332620] env[61648]: listener.cb(fileno) [ 832.332620] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.332620] env[61648]: result = function(*args, **kwargs) [ 832.332620] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 832.332620] env[61648]: return func(*args, **kwargs) [ 832.332620] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.332620] env[61648]: raise e [ 832.332620] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.332620] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 832.332620] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 832.332620] env[61648]: created_port_ids = self._update_ports_for_instance( [ 832.332620] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 832.332620] env[61648]: with excutils.save_and_reraise_exception(): [ 832.332620] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.332620] env[61648]: self.force_reraise() [ 832.332620] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.332620] env[61648]: raise self.value [ 832.332620] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 832.332620] env[61648]: updated_port = self._update_port( [ 832.332620] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.332620] env[61648]: _ensure_no_port_binding_failure(port) [ 832.332620] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.332620] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 832.333819] env[61648]: nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. [ 832.333819] env[61648]: Removing descriptor: 19 [ 832.432307] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.456611] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.456856] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.457032] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.457221] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.457365] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.457509] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.457749] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.457915] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.458093] env[61648]: DEBUG 
nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.458255] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.458422] env[61648]: DEBUG nova.virt.hardware [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.459292] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0b3a91-2727-444e-b4e1-54465c14cc98 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.467429] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d2603d-c86c-403e-89e7-9acb8bb3a8e4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.482176] env[61648]: ERROR nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. 
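Both PortBindingFailed tracebacks in this run show the same propagation pattern: the network allocation runs in an eventlet greenthread (_allocate_network_async), and the exception only reaches the build path later, when the VMware driver iterates network_info and the model's _sync_wrapper calls wait() on that greenthread. A small self-contained sketch of that behaviour, using illustrative function names rather than Nova's:

    import eventlet


    def allocate_network_async(port_id):
        # Stand-in for the background network allocation; here it always
        # fails, mirroring the PortBindingFailed seen in the log.
        raise RuntimeError('Binding failed for port %s' % port_id)


    # Spawn the allocation in a greenthread; nothing is raised yet.
    gt = eventlet.spawn(allocate_network_async,
                        'e27c0277-ae98-467e-98a7-d62f16c67fcc')

    try:
        # GreenThread.wait() re-raises the greenthread's exception in the
        # caller, which is why the allocation-side frames appear inside the
        # driver.spawn() traceback above.
        gt.wait()
    except RuntimeError as exc:
        print('caught in caller: %s' % exc)

This is why the spawn-side frames (driver.spawn, build_virtual_machine, get_vif_info) and the allocation-side frames (allocate_for_instance, _update_port) appear stitched together into a single traceback even though they ran in different greenthreads.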
[ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Traceback (most recent call last): [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] yield resources [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.driver.spawn(context, instance, image_meta, [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] vm_ref = self.build_virtual_machine(instance, [ 832.482176] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] for vif in network_info: [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return self._sync_wrapper(fn, *args, **kwargs) [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.wait() [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self[:] = self._gt.wait() [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return self._exit_event.wait() [ 832.482689] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 832.482689] env[61648]: ERROR 
nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] current.throw(*self._exc) [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] result = function(*args, **kwargs) [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return func(*args, **kwargs) [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise e [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] nwinfo = self.network_api.allocate_for_instance( [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] created_port_ids = self._update_ports_for_instance( [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] with excutils.save_and_reraise_exception(): [ 832.483265] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.force_reraise() [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise self.value [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] updated_port = self._update_port( [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] _ensure_no_port_binding_failure(port) [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise exception.PortBindingFailed(port_id=port['id']) [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. [ 832.483766] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] [ 832.483766] env[61648]: INFO nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Terminating instance [ 832.484462] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.484620] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.484785] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.588363] env[61648]: DEBUG nova.network.neutron [-] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.725503] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.726068] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 832.731022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.133s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.731022] env[61648]: INFO nova.compute.claims [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.006655] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.077509] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.090571] env[61648]: INFO nova.compute.manager [-] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Took 1.02 seconds to deallocate network for instance. [ 833.093073] env[61648]: DEBUG nova.compute.claims [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 833.093253] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.234351] env[61648]: DEBUG nova.compute.utils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.237643] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Not allocating networking since 'none' was specified. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 833.580047] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.580435] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 833.580634] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.580970] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfae07b1-089f-473b-a9bc-32ec94bef5da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.590143] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c696e7d2-d89c-4125-abec-e430bb463a25 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.611877] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c399cd8d-6cad-43d8-9226-36f9d9c247e3 could not be found. [ 833.612095] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.612278] env[61648]: INFO nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 833.612511] env[61648]: DEBUG oslo.service.loopingcall [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.612720] env[61648]: DEBUG nova.compute.manager [-] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 833.612816] env[61648]: DEBUG nova.network.neutron [-] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 833.626322] env[61648]: DEBUG nova.network.neutron [-] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.738309] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 833.789218] env[61648]: DEBUG nova.compute.manager [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Received event network-changed-e27c0277-ae98-467e-98a7-d62f16c67fcc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.789433] env[61648]: DEBUG nova.compute.manager [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Refreshing instance network info cache due to event network-changed-e27c0277-ae98-467e-98a7-d62f16c67fcc. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 833.789644] env[61648]: DEBUG oslo_concurrency.lockutils [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] Acquiring lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.789837] env[61648]: DEBUG oslo_concurrency.lockutils [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] Acquired lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.790020] env[61648]: DEBUG nova.network.neutron [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Refreshing network info cache for port e27c0277-ae98-467e-98a7-d62f16c67fcc {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.957239] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55615172-383c-4c38-aff2-4dd4b952b420 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.965325] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1ac095-16fa-4fb4-907a-bd70707ffa92 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.002935] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d1a7a8-874f-41fc-8312-115989105d9f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.009972] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a912684-314e-4e00-8679-72f70dae8e99 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.022809] env[61648]: DEBUG nova.compute.provider_tree [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.129064] env[61648]: DEBUG nova.network.neutron [-] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.309256] env[61648]: DEBUG nova.network.neutron [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.422773] env[61648]: DEBUG nova.network.neutron [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.526041] env[61648]: DEBUG nova.scheduler.client.report [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 834.634021] env[61648]: INFO nova.compute.manager [-] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Took 1.02 seconds to deallocate network for instance. [ 834.634148] env[61648]: DEBUG nova.compute.claims [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 834.634239] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.750300] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 834.779069] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.779304] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.779453] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.779627] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.779824] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.779995] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.780295] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.780450] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.780610] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 
tempest-ServerShowV247Test-389737757-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.780785] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.780979] env[61648]: DEBUG nova.virt.hardware [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.781857] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945b4dd6-d7ac-42be-8124-6a5a6736583c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.789916] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3576236a-501d-432a-94b7-c78a9dcfbd7e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.804168] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.809486] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Creating folder: Project (bffa907f223a430986e1c42650416c06). Parent ref: group-v285225. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.809786] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0e3191c-0670-4362-a0e1-45ad3011cf4b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.820284] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Created folder: Project (bffa907f223a430986e1c42650416c06) in parent group-v285225. [ 834.820459] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Creating folder: Instances. Parent ref: group-v285246. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.820677] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2b027a9-a7a3-40c0-86bd-62d1a7b5a8be {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.828054] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Created folder: Instances in parent group-v285246. 
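The CreateVM_Task handling that follows (task-1336676, polled until CreateVM_Task reports completion) relies on oslo.service's looping-call machinery: a poll function is invoked at a fixed interval until it raises LoopingCallDone. A minimal sketch of that polling pattern with oslo_service.loopingcall; the FakeTask object is purely illustrative and is not the oslo.vmware task type:

    from oslo_service import loopingcall


    class FakeTask(object):
        """Illustrative stand-in for a task that finishes after a few polls."""

        def __init__(self):
            self.polls = 0

        def progress(self):
            self.polls += 1
            return min(self.polls * 50, 100)


    def _poll_task(task):
        pct = task.progress()
        print('Task progress is %d%%.' % pct)
        if pct >= 100:
            # LoopingCallDone stops the loop; its retvalue becomes the
            # return value of wait() below.
            raise loopingcall.LoopingCallDone(retvalue='success')


    task = FakeTask()
    timer = loopingcall.FixedIntervalLoopingCall(_poll_task, task)
    result = timer.start(interval=0.5).wait()
    print('Task completed: %s' % result)

oslo.vmware's wait_for_task wraps a similar fixed-interval loop around the vCenter task handle (the log's _poll_task frames come from oslo_vmware/api.py), which is what produces the "progress is 0%" poll lines below.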
[ 834.828409] env[61648]: DEBUG oslo.service.loopingcall [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.828484] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.828652] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c424818c-5ce0-4606-a715-1e6a13ed132e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.843826] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.843826] env[61648]: value = "task-1336676" [ 834.843826] env[61648]: _type = "Task" [ 834.843826] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.850744] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336676, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.927539] env[61648]: DEBUG oslo_concurrency.lockutils [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] Releasing lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.927539] env[61648]: DEBUG nova.compute.manager [req-4d6d1c75-4a73-4d48-8051-91b5fc57dec0 req-25859657-b8ec-4dd0-bd86-a205ffa74cb1 service nova] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Received event network-vif-deleted-e27c0277-ae98-467e-98a7-d62f16c67fcc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.031416] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.031966] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.034673] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.572s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.036084] env[61648]: INFO nova.compute.claims [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.353725] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336676, 'name': CreateVM_Task, 'duration_secs': 0.254503} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.353874] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.354312] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.354471] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.354789] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.355035] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3b5289d-b420-4cf5-8ed7-2186d2fccf0a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.359310] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 835.359310] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]523a255d-7c75-5258-b65f-c3ae6aab8d2e" [ 835.359310] env[61648]: _type = "Task" [ 835.359310] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.367944] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]523a255d-7c75-5258-b65f-c3ae6aab8d2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.548797] env[61648]: DEBUG nova.compute.utils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.552416] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Not allocating networking since 'none' was specified. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 835.869246] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]523a255d-7c75-5258-b65f-c3ae6aab8d2e, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.869593] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.869765] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.870034] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.870187] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.870368] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.870622] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b26928a-5e33-4908-b860-f377e6dec5e4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.878420] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.878594] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.879377] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-209bc189-b176-45c2-bb3d-ad193e769623 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.884267] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 835.884267] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]529ec1ee-1304-8a02-ff0e-d0e93a0e8d3b" [ 835.884267] env[61648]: _type = "Task" [ 835.884267] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.893250] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]529ec1ee-1304-8a02-ff0e-d0e93a0e8d3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.053791] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.185060] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.185280] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.236250] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e29daf5-f16c-457a-a480-00f388b28ba9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.244107] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f93bc24-7dfb-4621-917a-cb26559b3f25 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.273062] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16841e4b-ef22-4c01-b179-1839c1888de5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.280329] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95985da-f23a-450d-91da-8ecba7d497d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.293704] env[61648]: DEBUG nova.compute.provider_tree [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.398025] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]529ec1ee-1304-8a02-ff0e-d0e93a0e8d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.009104} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.398025] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1524f6ec-4507-45c8-8750-452e36aaf4c3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.401556] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 836.401556] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52641cfa-b7a7-071d-6849-a9924dc4ddaf" [ 836.401556] env[61648]: _type = "Task" [ 836.401556] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.409358] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52641cfa-b7a7-071d-6849-a9924dc4ddaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.692591] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.692923] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 836.693173] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 836.799065] env[61648]: DEBUG nova.scheduler.client.report [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.914481] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52641cfa-b7a7-071d-6849-a9924dc4ddaf, 'name': SearchDatastore_Task, 'duration_secs': 0.008256} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.914481] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.914481] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 32a2c7ce-2980-4eac-ad52-b8d5d67d669b/32a2c7ce-2980-4eac-ad52-b8d5d67d669b.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.914481] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47a86b2d-ae2a-44da-8db8-0f483def2c97 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.922018] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 836.922018] env[61648]: value = "task-1336677" [ 836.922018] env[61648]: _type = "Task" [ 836.922018] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.926648] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336677, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.072028] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.107158] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.107158] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.107158] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.107158] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.107523] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.107523] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.107688] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.108299] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.108688] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 
tempest-ServerShowV247Test-389737757-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.109067] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.109407] env[61648]: DEBUG nova.virt.hardware [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.110498] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0b480a-e714-4274-9fe0-f564fd182f4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.121022] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09811c5-5c88-4352-98bc-419ba8795df0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.137232] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.144177] env[61648]: DEBUG oslo.service.loopingcall [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.144687] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.145112] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e1fc439-3190-4c23-97d7-4df7eba739f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.163419] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.163419] env[61648]: value = "task-1336678" [ 837.163419] env[61648]: _type = "Task" [ 837.163419] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.174350] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336678, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.199497] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Skipping network cache update for instance because it is Building. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.199954] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.200274] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.200554] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.203673] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.203673] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.203673] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.203673] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 837.203673] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 837.203673] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.203941] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 837.204162] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.304022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.304022] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.305759] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.849s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.434576] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471962} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.434890] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] 32a2c7ce-2980-4eac-ad52-b8d5d67d669b/32a2c7ce-2980-4eac-ad52-b8d5d67d669b.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 837.435128] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.435479] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3b7ec75-696c-4454-95b6-db9a795eb3b9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.441565] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 837.441565] env[61648]: value = "task-1336679" [ 837.441565] env[61648]: _type = "Task" [ 837.441565] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.449620] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336679, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.673876] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336678, 'name': CreateVM_Task, 'duration_secs': 0.405772} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.674049] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.674458] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.674636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.674997] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 837.675298] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57afbb9e-0ccc-4b9c-b174-f0e4ddff0726 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.679641] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 837.679641] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5215af37-c7a6-9ea5-e7a3-ce78cb443bc8" [ 837.679641] env[61648]: _type = "Task" [ 837.679641] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.688331] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5215af37-c7a6-9ea5-e7a3-ce78cb443bc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.706441] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.819188] env[61648]: DEBUG nova.compute.utils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.823638] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 837.825016] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.882401] env[61648]: DEBUG nova.policy [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c3db53656e04669a68dd3d2cf6bb80c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee6c03999a744864be947640ed1b85f7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 837.953508] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336679, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058323} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.953815] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.955482] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbb278f-0029-4038-ac5d-6023f9138b06 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.986625] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 32a2c7ce-2980-4eac-ad52-b8d5d67d669b/32a2c7ce-2980-4eac-ad52-b8d5d67d669b.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.989800] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-295e1190-0874-4827-8066-529d5269134d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.013577] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 838.013577] env[61648]: value = "task-1336680" [ 838.013577] env[61648]: _type = "Task" [ 838.013577] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.025545] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336680, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.107559] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965c22c8-b205-427d-aeee-7a61798ba69b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.115941] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccc6b97-2872-4330-91c0-d8a6a767bded {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.148937] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebdad3f-f3d7-46d7-a15a-e2ddf8bb1070 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.157396] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d14c5d-f384-4bcc-a026-9dc987460592 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.174608] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 838.191752] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5215af37-c7a6-9ea5-e7a3-ce78cb443bc8, 'name': SearchDatastore_Task, 'duration_secs': 0.008894} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.192510] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.192510] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.192777] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.192969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.193580] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.193580] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e09e3f9-1650-4b6a-9fd0-db3814ff57c4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.201585] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.201720] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.202622] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03ccd1eb-ea2a-41bc-a992-5dc47a1772e9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.208357] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 838.208357] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]525bfb5d-b64b-7403-3850-59a7107aa470" [ 838.208357] env[61648]: _type = "Task" [ 838.208357] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.217163] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]525bfb5d-b64b-7403-3850-59a7107aa470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.290821] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Successfully created port: dd20c3c3-c478-4b1a-ad58-2a04e64bbe15 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.324449] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.522595] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336680, 'name': ReconfigVM_Task, 'duration_secs': 0.268399} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.522924] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 32a2c7ce-2980-4eac-ad52-b8d5d67d669b/32a2c7ce-2980-4eac-ad52-b8d5d67d669b.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.523580] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7db657e6-e9c6-4c11-b198-087a105a130c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.530303] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 838.530303] env[61648]: value = "task-1336681" [ 838.530303] env[61648]: _type = "Task" [ 838.530303] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.539159] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336681, 'name': Rename_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.694719] env[61648]: ERROR nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [req-b8bc66dc-2105-4c5f-8f84-b81fbdff2147] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b8bc66dc-2105-4c5f-8f84-b81fbdff2147"}]}: nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
[ 838.713131] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 838.720555] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]525bfb5d-b64b-7403-3850-59a7107aa470, 'name': SearchDatastore_Task, 'duration_secs': 0.008502} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.721352] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-703dd3e0-6509-48cb-8f40-e4514b0c9811 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.726207] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 838.726207] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e01709-26af-efb6-c4c6-60bb48632784" [ 838.726207] env[61648]: _type = "Task" [ 838.726207] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.727049] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 838.727297] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 838.736797] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52e01709-26af-efb6-c4c6-60bb48632784, 'name': SearchDatastore_Task, 'duration_secs': 0.008462} completed 
successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.737089] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.737257] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.738130] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 838.739831] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-099ff0de-2810-4d91-b6e3-e3ed002ee143 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.745459] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 838.745459] env[61648]: value = "task-1336682" [ 838.745459] env[61648]: _type = "Task" [ 838.745459] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.752632] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336682, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.759807] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 839.037212] env[61648]: DEBUG nova.compute.manager [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Received event network-changed-dd20c3c3-c478-4b1a-ad58-2a04e64bbe15 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.037498] env[61648]: DEBUG nova.compute.manager [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Refreshing instance network info cache due to event network-changed-dd20c3c3-c478-4b1a-ad58-2a04e64bbe15. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.037811] env[61648]: DEBUG oslo_concurrency.lockutils [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] Acquiring lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.037811] env[61648]: DEBUG oslo_concurrency.lockutils [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] Acquired lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.038379] env[61648]: DEBUG nova.network.neutron [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Refreshing network info cache for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.042635] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336681, 'name': Rename_Task, 'duration_secs': 0.131499} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.045978] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.046725] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03583205-7fe9-4340-8927-dac6b0e60e62 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.053676] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 839.053676] env[61648]: value = "task-1336683" [ 839.053676] env[61648]: _type = "Task" [ 839.053676] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.061983] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83bf38d-cb3e-4f03-a540-bb3e00c4114a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.070284] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.075569] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b712f62-0327-4f5e-9459-6f55b5314cb3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.108439] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5352caca-00c6-4afb-93cd-5216da5160c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.116535] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e011969b-20f4-47d6-bafd-9e0c069e785b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.131705] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.256616] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: 
{'id': task-1336682, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481229} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.257769] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.257769] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.257769] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46ee4bac-6bd7-4188-b5fc-b92a004a2ae6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.262953] env[61648]: ERROR nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. [ 839.262953] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 839.262953] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 839.262953] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 839.262953] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.262953] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.262953] env[61648]: ERROR nova.compute.manager raise self.value [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 839.262953] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 839.262953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.262953] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 839.263370] env[61648]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 839.263370] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 839.263370] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. [ 839.263370] env[61648]: ERROR nova.compute.manager [ 839.263370] env[61648]: Traceback (most recent call last): [ 839.263370] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 839.263370] env[61648]: listener.cb(fileno) [ 839.263370] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 839.263370] env[61648]: result = function(*args, **kwargs) [ 839.263370] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 839.263370] env[61648]: return func(*args, **kwargs) [ 839.263370] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 839.263370] env[61648]: raise e [ 839.263370] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 839.263370] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 839.263370] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 839.263370] env[61648]: created_port_ids = self._update_ports_for_instance( [ 839.263370] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 839.263370] env[61648]: with excutils.save_and_reraise_exception(): [ 839.263370] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.263370] env[61648]: self.force_reraise() [ 839.263370] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.263370] env[61648]: raise self.value [ 839.263370] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 839.263370] env[61648]: updated_port = self._update_port( [ 839.263370] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.263370] env[61648]: _ensure_no_port_binding_failure(port) [ 839.263370] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 839.263370] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 839.264093] env[61648]: nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. [ 839.264093] env[61648]: Removing descriptor: 19 [ 839.264894] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 839.264894] env[61648]: value = "task-1336684" [ 839.264894] env[61648]: _type = "Task" [ 839.264894] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.274320] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336684, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.337113] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 839.364958] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.365247] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.365362] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.365540] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.365680] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.365821] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.366086] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 839.366237] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.366405] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.366565] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.366740] env[61648]: DEBUG nova.virt.hardware [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.367609] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6912100f-96bc-4180-8f32-8d69f7691888 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.375543] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243b1033-8360-4f2b-b137-24cd8d3a0408 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.391346] env[61648]: ERROR nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. 
[ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Traceback (most recent call last): [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] yield resources [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.driver.spawn(context, instance, image_meta, [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] vm_ref = self.build_virtual_machine(instance, [ 839.391346] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] for vif in network_info: [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return self._sync_wrapper(fn, *args, **kwargs) [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.wait() [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self[:] = self._gt.wait() [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return self._exit_event.wait() [ 839.391726] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 839.391726] env[61648]: ERROR 
nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] current.throw(*self._exc) [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] result = function(*args, **kwargs) [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return func(*args, **kwargs) [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise e [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] nwinfo = self.network_api.allocate_for_instance( [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] created_port_ids = self._update_ports_for_instance( [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] with excutils.save_and_reraise_exception(): [ 839.392048] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.force_reraise() [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise self.value [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] updated_port = self._update_port( [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] _ensure_no_port_binding_failure(port) [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise exception.PortBindingFailed(port_id=port['id']) [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. [ 839.392349] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] [ 839.392349] env[61648]: INFO nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Terminating instance [ 839.394281] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.563553] env[61648]: DEBUG oslo_vmware.api [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336683, 'name': PowerOnVM_Task, 'duration_secs': 0.475877} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.564351] env[61648]: DEBUG nova.network.neutron [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.565992] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.566215] env[61648]: INFO nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Took 4.82 seconds to spawn the instance on the hypervisor. 
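The tracebacks above all terminate in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron reported the port's binding as failed, which is why the spawn is aborted and the instance terminated. A minimal self-contained sketch of that check follows; the test against binding:vif_type == 'binding_failed' is an assumption about what the real helper inspects, and the exception class here is a local stand-in rather than nova.exception.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a failed binding by setting the port's
    # binding:vif_type to 'binding_failed'; treat that as fatal for spawn.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port whose binding failed, as in the records above.
try:
    ensure_no_port_binding_failure(
        {'id': 'dd20c3c3-c478-4b1a-ad58-2a04e64bbe15',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)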
[ 839.566391] env[61648]: DEBUG nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.567170] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e897e31-79e4-4ad3-9118-9d28e6066ca9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.618701] env[61648]: DEBUG nova.network.neutron [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.661483] env[61648]: DEBUG nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 839.661715] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 95 to 96 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 839.661906] env[61648]: DEBUG nova.compute.provider_tree [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.773606] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070389} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.773857] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.774622] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7ef4b1-95a6-4256-8c2c-451d26579c2a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.794629] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.794875] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c5892dc-e9f5-452b-b447-ead249c7621c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.813649] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 839.813649] env[61648]: value = "task-1336685" [ 839.813649] env[61648]: _type = "Task" [ 839.813649] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.823269] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336685, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.083965] env[61648]: INFO nova.compute.manager [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Took 24.41 seconds to build instance. 
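Each *_Task invocation in these records (Rename_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task) follows the same shape: the SOAP call returns a task handle, and wait_for_task polls it, logging "progress is N%" until it completes or errors. A generic polling loop in that spirit is sketched below; the poll_task callable and the dict keys it returns are hypothetical, not oslo.vmware's real task interface.

import time

def wait_for_task(poll_task, poll_interval=0.5, timeout=300):
    """Poll a task until it reports success or error, logging progress."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task()          # e.g. {'state': 'running', 'progress': 40}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print("progress is %d%%" % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %ss" % timeout)

# Stub demo: a task that finishes on the third poll.
_polls = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 50},
               {'state': 'success', 'progress': 100}])
print(wait_for_task(lambda: next(_polls), poll_interval=0))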
[ 840.121591] env[61648]: DEBUG oslo_concurrency.lockutils [req-251b9984-0142-47ea-8b85-07b283cbfaaa req-c871a61a-4b4a-49af-9ac5-471ab42a92bd service nova] Releasing lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.121995] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquired lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.122201] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.167982] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.862s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.168642] env[61648]: ERROR nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
[ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Traceback (most recent call last): [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.driver.spawn(context, instance, image_meta, [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] vm_ref = self.build_virtual_machine(instance, [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.168642] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] for vif in network_info: [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self._sync_wrapper(fn, *args, **kwargs) [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.wait() [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self[:] = self._gt.wait() [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self._exit_event.wait() [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] result = hub.switch() [ 840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
840.168936] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return self.greenlet.switch() [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] result = function(*args, **kwargs) [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] return func(*args, **kwargs) [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise e [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] nwinfo = self.network_api.allocate_for_instance( [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] created_port_ids = self._update_ports_for_instance( [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] with excutils.save_and_reraise_exception(): [ 840.169255] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] self.force_reraise() [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise self.value [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] updated_port = self._update_port( [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] _ensure_no_port_binding_failure(port) [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] raise exception.PortBindingFailed(port_id=port['id']) [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] nova.exception.PortBindingFailed: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. [ 840.169569] env[61648]: ERROR nova.compute.manager [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] [ 840.169834] env[61648]: DEBUG nova.compute.utils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 840.170590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.204s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.173982] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Build of instance c633de1e-3dfb-4304-ac9f-d8f4a2b725d1 was re-scheduled: Binding failed for port 5439f0ef-c438-4b90-8975-09a2c99f1dbb, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 840.174463] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 840.174690] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquiring lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.174837] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Acquired lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.174994] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.323697] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336685, 'name': ReconfigVM_Task, 'duration_secs': 0.264218} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.323938] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Reconfigured VM instance instance-00000044 to attach disk [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.324590] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35cef4b3-691c-4e02-adca-f5c9ad5b5d3f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.330539] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 840.330539] env[61648]: value = "task-1336686" [ 840.330539] env[61648]: _type = "Task" [ 840.330539] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.338655] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336686, 'name': Rename_Task} progress is 5%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.586577] env[61648]: DEBUG oslo_concurrency.lockutils [None req-34ffc55c-59d4-4cd3-87bd-5929457110af tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.545s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.638870] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.682571] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.698512] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.753484] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.839845] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336686, 'name': Rename_Task, 'duration_secs': 0.131685} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.840104] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.840509] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af385c99-de3a-438f-83c3-29e26db67b70 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.849077] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 840.849077] env[61648]: value = "task-1336687" [ 840.849077] env[61648]: _type = "Task" [ 840.849077] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.858925] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.886598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bf222b-a361-4b90-a569-3afef886801d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.893713] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a513239f-c53b-4377-866e-674a33a9352d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.923335] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a647f46e-7152-44f0-b841-3fc206557d25 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.930434] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4279ca-f925-4958-a6e2-9935da63672a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.943669] env[61648]: DEBUG nova.compute.provider_tree [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.082693] env[61648]: DEBUG nova.compute.manager [req-e64b39cc-2c51-464c-af77-d15bda664646 req-d106d857-973f-4fae-a130-08287ba2fb1e service nova] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Received event network-vif-deleted-dd20c3c3-c478-4b1a-ad58-2a04e64bbe15 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.089260] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.186817] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Releasing lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.187048] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 841.187298] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 841.187662] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-467b19cf-92e6-4364-8a4a-c12d8039f9a9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.197519] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e97a050-4c98-480c-9966-4fec13310818 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.223178] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7 could not be found. [ 841.223412] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.223591] env[61648]: INFO nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 841.223827] env[61648]: DEBUG oslo.service.loopingcall [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.224041] env[61648]: DEBUG nova.compute.manager [-] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.224140] env[61648]: DEBUG nova.network.neutron [-] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.239648] env[61648]: DEBUG nova.network.neutron [-] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.256140] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Releasing lock "refresh_cache-c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.256439] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 841.256666] env[61648]: DEBUG nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.256883] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.271180] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.357889] env[61648]: DEBUG oslo_vmware.api [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336687, 'name': PowerOnVM_Task, 'duration_secs': 0.40142} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.358939] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.358939] env[61648]: INFO nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Took 4.29 seconds to spawn the instance on the hypervisor. 
[ 841.358939] env[61648]: DEBUG nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 841.359444] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec71397-02d9-4105-8e3c-dd3b8189b091 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.446810] env[61648]: DEBUG nova.scheduler.client.report [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.606980] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.743639] env[61648]: DEBUG nova.network.neutron [-] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.773323] env[61648]: DEBUG nova.network.neutron [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.876675] env[61648]: INFO nova.compute.manager [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Took 24.30 seconds to build instance. 
[ 841.951770] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.781s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.952424] env[61648]: ERROR nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Traceback (most recent call last): [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.driver.spawn(context, instance, image_meta, [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] vm_ref = self.build_virtual_machine(instance, [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] vif_infos = vmwarevif.get_vif_info(self._session, [ 841.952424] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] for vif in network_info: [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self._sync_wrapper(fn, *args, **kwargs) [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.wait() [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self[:] 
= self._gt.wait() [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self._exit_event.wait() [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] result = hub.switch() [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 841.952747] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return self.greenlet.switch() [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] result = function(*args, **kwargs) [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] return func(*args, **kwargs) [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise e [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] nwinfo = self.network_api.allocate_for_instance( [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] created_port_ids = self._update_ports_for_instance( [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] with excutils.save_and_reraise_exception(): [ 841.953083] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] self.force_reraise() [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 841.953404] env[61648]: ERROR nova.compute.manager 
[instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise self.value [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] updated_port = self._update_port( [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] _ensure_no_port_binding_failure(port) [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] raise exception.PortBindingFailed(port_id=port['id']) [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] nova.exception.PortBindingFailed: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. [ 841.953404] env[61648]: ERROR nova.compute.manager [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] [ 841.953689] env[61648]: DEBUG nova.compute.utils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 841.954318] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.773s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.955744] env[61648]: INFO nova.compute.claims [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.962018] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Build of instance 9cc301f6-45de-43b9-a88d-d94e3f00cff3 was re-scheduled: Binding failed for port a40852a0-e0c6-4e0d-bc2f-d03faa3952b2, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 841.962018] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 841.962018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquiring lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.962018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Acquired lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.962310] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.245369] env[61648]: INFO nova.compute.manager [-] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Took 1.02 seconds to deallocate network for instance. [ 842.248904] env[61648]: DEBUG nova.compute.claims [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 842.249124] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.275356] env[61648]: INFO nova.compute.manager [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] [instance: c633de1e-3dfb-4304-ac9f-d8f4a2b725d1] Took 1.02 seconds to deallocate network for instance. 
[ 842.380150] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3fe55d8c-fa54-4772-8931-221cdf9bae66 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.855s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.494676] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.592447] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.811604] env[61648]: INFO nova.compute.manager [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Rebuilding instance [ 842.849108] env[61648]: DEBUG nova.compute.manager [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.850092] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9810e083-5c19-4d1d-839c-e042e828b185 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.883033] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 843.095235] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Releasing lock "refresh_cache-9cc301f6-45de-43b9-a88d-d94e3f00cff3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.095467] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 843.095644] env[61648]: DEBUG nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.095810] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.111732] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.153199] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99970ca-096d-4c44-9004-b22fc4bbc721 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.161628] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73ff9be-f7d5-4097-bbfd-31df2587cc98 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.192384] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a30601e-9692-4ab1-ad9c-3dd4fd56d6de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.199182] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7245c51a-156c-4ff2-ae5e-04b707dabc25 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.213113] env[61648]: DEBUG nova.compute.provider_tree [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.310022] env[61648]: INFO nova.scheduler.client.report [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Deleted allocations for instance c633de1e-3dfb-4304-ac9f-d8f4a2b725d1 [ 843.361171] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.361477] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8819ce3-f219-41ea-b72f-4e4deaf7fd70 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.368978] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 843.368978] env[61648]: value = "task-1336688" [ 843.368978] env[61648]: _type = "Task" [ 843.368978] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.377385] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.406020] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.618073] env[61648]: DEBUG nova.network.neutron [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.716409] env[61648]: DEBUG nova.scheduler.client.report [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.821772] env[61648]: DEBUG oslo_concurrency.lockutils [None req-853bc7db-47dc-4c80-b997-986aa58ef2dd tempest-ServersAdminNegativeTestJSON-1934551166 tempest-ServersAdminNegativeTestJSON-1934551166-project-member] Lock "c633de1e-3dfb-4304-ac9f-d8f4a2b725d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.905s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.877695] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336688, 'name': PowerOffVM_Task, 'duration_secs': 0.151289} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.877955] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.878191] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.878911] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c1a17-bd98-4af7-8900-a3b105e38a5c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.884915] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.885150] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6038fefb-7ff0-450c-a2ea-403090b84919 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.909259] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.909414] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.909592] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleting the datastore file [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.909851] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2520f528-f70d-4b43-9d71-31999b2a8764 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.915514] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 843.915514] env[61648]: value = "task-1336690" [ 843.915514] env[61648]: _type = "Task" [ 843.915514] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.923133] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336690, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.121458] env[61648]: INFO nova.compute.manager [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] [instance: 9cc301f6-45de-43b9-a88d-d94e3f00cff3] Took 1.03 seconds to deallocate network for instance. [ 844.222184] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.223166] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.227227] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.200s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.229154] env[61648]: INFO nova.compute.claims [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.324599] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 844.424765] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097512} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.425364] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.425556] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.425733] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.737367] env[61648]: DEBUG nova.compute.utils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.747021] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 844.747021] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.849386] env[61648]: DEBUG nova.policy [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c3db53656e04669a68dd3d2cf6bb80c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee6c03999a744864be947640ed1b85f7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 844.856191] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.146832] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 
db35b417-bcdb-4380-927a-f755e6421624] Successfully created port: eaee6d3c-9b6c-4c24-863e-7248ef491f8f {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.169306] env[61648]: INFO nova.scheduler.client.report [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Deleted allocations for instance 9cc301f6-45de-43b9-a88d-d94e3f00cff3 [ 845.244809] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.461658] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.462008] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.462247] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.462503] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.462695] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.462910] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.463283] env[61648]: DEBUG nova.virt.hardware [None 
req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.463469] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.463778] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.464051] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.464276] env[61648]: DEBUG nova.virt.hardware [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.465257] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113c79c5-f79b-4953-9586-fa2b2d2228bd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.473898] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20debe76-cb53-412e-9c7b-384f050f13e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.490697] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.496493] env[61648]: DEBUG oslo.service.loopingcall [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.498625] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.499470] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40c9013-5d70-43a0-82f1-fc53a37cc92a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.502233] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecd46e7a-cae8-4d84-94cf-02e6f70eb19c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.524778] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c9416d-2b5c-48df-a6bd-a05d5fa020a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.527894] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.527894] env[61648]: value = "task-1336691" [ 845.527894] env[61648]: _type = "Task" [ 845.527894] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.561481] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a4fb0a-16fb-4886-a1d6-892240618817 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.567447] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336691, 'name': CreateVM_Task} progress is 15%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.572218] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b7d3b7-9057-4ae3-83fa-0a45f0f72145 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.585605] env[61648]: DEBUG nova.compute.provider_tree [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.682422] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ae8f4d7f-8ae4-4ff7-9fba-a4aaa41d3578 tempest-InstanceActionsV221TestJSON-2066765800 tempest-InstanceActionsV221TestJSON-2066765800-project-member] Lock "9cc301f6-45de-43b9-a88d-d94e3f00cff3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.338s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.037993] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336691, 'name': CreateVM_Task, 'duration_secs': 0.30604} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.039012] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 846.039012] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.039012] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.040346] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 846.040346] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d3b2a9a-d701-456c-8284-d88d0066520d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.043992] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 846.043992] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52dc3c94-e1cd-d5cc-1d60-34a03cb437a8" [ 846.043992] env[61648]: _type = "Task" [ 846.043992] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.051727] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52dc3c94-e1cd-d5cc-1d60-34a03cb437a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.088776] env[61648]: DEBUG nova.scheduler.client.report [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.190909] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 846.252344] env[61648]: DEBUG nova.compute.manager [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Received event network-changed-eaee6d3c-9b6c-4c24-863e-7248ef491f8f {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.252540] env[61648]: DEBUG nova.compute.manager [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Refreshing instance network info cache due to event network-changed-eaee6d3c-9b6c-4c24-863e-7248ef491f8f. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 846.252746] env[61648]: DEBUG oslo_concurrency.lockutils [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] Acquiring lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.252886] env[61648]: DEBUG oslo_concurrency.lockutils [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] Acquired lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.254184] env[61648]: DEBUG nova.network.neutron [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Refreshing network info cache for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.259046] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.297020] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.297020] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.297020] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.297235] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.297235] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.297641] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.297967] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.298272] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.298567] env[61648]: DEBUG nova.virt.hardware [None 
req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.298859] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.299160] env[61648]: DEBUG nova.virt.hardware [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.302209] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b687a2b-f8dc-446b-b9bc-5b40bcfe809d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.308942] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860c99ac-739f-49a1-9c7a-a2cc53dbb707 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.466151] env[61648]: ERROR nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. 
[ 846.466151] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.466151] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 846.466151] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 846.466151] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.466151] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.466151] env[61648]: ERROR nova.compute.manager raise self.value [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 846.466151] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 846.466151] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.466151] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 846.466882] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.466882] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 846.466882] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. 
[ 846.466882] env[61648]: ERROR nova.compute.manager [ 846.466882] env[61648]: Traceback (most recent call last): [ 846.466882] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 846.466882] env[61648]: listener.cb(fileno) [ 846.466882] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.466882] env[61648]: result = function(*args, **kwargs) [ 846.466882] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 846.466882] env[61648]: return func(*args, **kwargs) [ 846.466882] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.466882] env[61648]: raise e [ 846.466882] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.466882] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 846.466882] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 846.466882] env[61648]: created_port_ids = self._update_ports_for_instance( [ 846.466882] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 846.466882] env[61648]: with excutils.save_and_reraise_exception(): [ 846.466882] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.466882] env[61648]: self.force_reraise() [ 846.466882] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.466882] env[61648]: raise self.value [ 846.466882] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 846.466882] env[61648]: updated_port = self._update_port( [ 846.466882] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.466882] env[61648]: _ensure_no_port_binding_failure(port) [ 846.466882] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.466882] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 846.467739] env[61648]: nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. [ 846.467739] env[61648]: Removing descriptor: 16 [ 846.467739] env[61648]: ERROR nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. 
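The PortBindingFailed tracebacks in this stretch of the log all terminate in nova.network.neutron._ensure_no_port_binding_failure (neutron.py line 294), which raises the exception for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f once Neutron reports the binding as failed. For readers following the traceback, here is a minimal, self-contained sketch of that check: the raise site and the exception message are taken from the log, while the 'binding:vif_type' == 'binding_failed' test is an assumption about how a failed binding is detected and is not copied from the Nova source.

    # Hedged sketch of the check behind the PortBindingFailed tracebacks above.
    # The exception message mirrors the log; the detection condition is assumed.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron flags a port whose binding could not be completed
        # by setting its vif_type to a 'binding_failed' sentinel value.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the log would trip the check once its binding has failed:
    port = {'id': 'eaee6d3c-9b6c-4c24-863e-7248ef491f8f',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)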
[ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] Traceback (most recent call last): [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] yield resources [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.driver.spawn(context, instance, image_meta, [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.467739] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] vm_ref = self.build_virtual_machine(instance, [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] for vif in network_info: [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self._sync_wrapper(fn, *args, **kwargs) [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.wait() [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self[:] = self._gt.wait() [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self._exit_event.wait() [ 846.468104] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 846.468425] env[61648]: ERROR 
nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] result = hub.switch() [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self.greenlet.switch() [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] result = function(*args, **kwargs) [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return func(*args, **kwargs) [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise e [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] nwinfo = self.network_api.allocate_for_instance( [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 846.468425] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] created_port_ids = self._update_ports_for_instance( [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] with excutils.save_and_reraise_exception(): [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.force_reraise() [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise self.value [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] updated_port = self._update_port( [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.468847] 
env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] _ensure_no_port_binding_failure(port) [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.468847] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise exception.PortBindingFailed(port_id=port['id']) [ 846.469205] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. [ 846.469205] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] [ 846.469205] env[61648]: INFO nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Terminating instance [ 846.469205] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.554827] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52dc3c94-e1cd-d5cc-1d60-34a03cb437a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009265} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.555167] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.555409] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.555636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.555779] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.555950] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.556228] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba5b03cf-4d8a-4642-8263-a0ce7a4c09f6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.564334] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.564508] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.565318] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa148fb-22b0-46ff-b99c-555af2c64ab0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.569916] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 846.569916] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52731c58-7118-b90d-50e7-2ede641f4e0e" [ 846.569916] env[61648]: _type = "Task" [ 846.569916] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.579327] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52731c58-7118-b90d-50e7-2ede641f4e0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.594375] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.594860] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 846.597494] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.492s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.717246] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.777957] env[61648]: DEBUG nova.network.neutron [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.873376] env[61648]: DEBUG nova.network.neutron [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.082058] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52731c58-7118-b90d-50e7-2ede641f4e0e, 'name': SearchDatastore_Task, 'duration_secs': 0.009666} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.082058] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9256f1d0-7b51-4910-8152-a372d1c17b3a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.087032] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 847.087032] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]527f87dc-e188-226c-f125-12b4bf0df0c2" [ 847.087032] env[61648]: _type = "Task" [ 847.087032] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.094517] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]527f87dc-e188-226c-f125-12b4bf0df0c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.106648] env[61648]: DEBUG nova.compute.utils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 847.108585] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 847.108807] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.218478] env[61648]: DEBUG nova.policy [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab719482a69c4ba2b0725bb68a05930c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e97d3c0049d747fe80907ef09f3ed754', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 847.356302] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f27bfe-fc4b-4985-b6f9-c404563cbb7d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.365285] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b98f435-350b-4d0a-80e0-92304495f420 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.401206] env[61648]: DEBUG oslo_concurrency.lockutils [req-f74dc67e-551a-459e-986a-84cee93d7704 req-7210672c-1f3f-4742-8a6d-d77aa044262d service nova] Releasing lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.401206] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquired lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.401206] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.402303] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a2db52-cc7f-4d47-ad28-f687e9d1d792 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.411224] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875be9f7-53a5-46c5-9b0e-5faef7ebf0e7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.427497] env[61648]: DEBUG nova.compute.provider_tree [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Inventory has not changed in 
ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.600559] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]527f87dc-e188-226c-f125-12b4bf0df0c2, 'name': SearchDatastore_Task, 'duration_secs': 0.00906} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.600927] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.601143] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.601570] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e72d4c3-1a79-47f1-a3e3-25649ddb3817 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.609678] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 847.609678] env[61648]: value = "task-1336692" [ 847.609678] env[61648]: _type = "Task" [ 847.609678] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.616370] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 847.624758] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336692, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.783317] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Successfully created port: cab8fc31-5619-4956-a775-42241810c27c {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.927917] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.930283] env[61648]: DEBUG nova.scheduler.client.report [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.055191] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.128353] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492684} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.132899] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.132899] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.133778] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b60a31d6-cfb6-46d3-852d-9c85863ab124 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.142861] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 848.142861] env[61648]: value = "task-1336693" [ 848.142861] env[61648]: _type = "Task" [ 848.142861] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.156318] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.294754] env[61648]: DEBUG nova.compute.manager [req-f1c20bb9-112e-4cfa-804f-a11339754466 req-07ee0702-d18e-4e85-ade9-9d432eb1537d service nova] [instance: db35b417-bcdb-4380-927a-f755e6421624] Received event network-vif-deleted-eaee6d3c-9b6c-4c24-863e-7248ef491f8f {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.440056] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.840s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.440056] env[61648]: ERROR nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. 
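The datastore and virtual-disk operations around this point follow the same wait_for_task pattern each time: a task handle comes back (e.g. task-1336692, task-1336693), the API logs "Waiting for the task ... to complete", polls progress (0%), and finally reports "completed successfully" with a duration_secs value. A generic illustration of that polling loop is sketched below; it is not the oslo.vmware implementation, and the poll callable, the result dictionary shape, and POLL_INTERVAL are assumptions made for the example.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; assumed, not taken from the log

    def wait_for_task(poll, timeout=60.0):
        # `poll` stands in for one round-trip that fetches task state from the
        # backend and returns {'state': 'running'|'success'|'error',
        # 'progress': int, 'result': object}.
        start = time.monotonic()
        while True:
            info = poll()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            if time.monotonic() - start > timeout:
                raise TimeoutError('task still %s%% done after %.0fs'
                                   % (info.get('progress', 0), timeout))
            time.sleep(POLL_INTERVAL)

    # Usage: a fake task that reports 0% twice and then succeeds, mirroring the
    # progress-is-0%-then-completed-successfully sequence seen in the log.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 0},
                   {'state': 'success', 'result': 'task-1336693 done'}])
    print(wait_for_task(lambda: next(states)))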
[ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Traceback (most recent call last): [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.driver.spawn(context, instance, image_meta, [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 848.440056] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] vm_ref = self.build_virtual_machine(instance, [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] vif_infos = vmwarevif.get_vif_info(self._session, [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] for vif in network_info: [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self._sync_wrapper(fn, *args, **kwargs) [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.wait() [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self[:] = self._gt.wait() [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self._exit_event.wait() [ 848.440462] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] result = hub.switch() [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return self.greenlet.switch() [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] result = function(*args, **kwargs) [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] return func(*args, **kwargs) [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise e [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] nwinfo = self.network_api.allocate_for_instance( [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 848.440805] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] created_port_ids = self._update_ports_for_instance( [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] with excutils.save_and_reraise_exception(): [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] self.force_reraise() [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise self.value [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] updated_port = self._update_port( [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] _ensure_no_port_binding_failure(port) [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 848.441151] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] raise exception.PortBindingFailed(port_id=port['id']) [ 848.441528] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] nova.exception.PortBindingFailed: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. [ 848.441528] env[61648]: ERROR nova.compute.manager [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] [ 848.441528] env[61648]: DEBUG nova.compute.utils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 848.444912] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.347s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.445284] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Build of instance acbab424-c325-4e57-81a2-3d4a1ae4a081 was re-scheduled: Binding failed for port 430c5d97-bf47-4d4b-a70c-ff574734d735, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 848.445897] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 848.446510] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquiring lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.446826] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Acquired lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.447148] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.559749] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Releasing lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.560065] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 848.560312] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.560619] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f174a5b-7fc0-4672-b274-355787a0def8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.572957] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711c1f93-1937-4b29-9339-bc093358885d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.600530] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db35b417-bcdb-4380-927a-f755e6421624 could not be found. [ 848.600781] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.601063] env[61648]: INFO nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Took 0.04 seconds to destroy the instance on the hypervisor. [ 848.601240] env[61648]: DEBUG oslo.service.loopingcall [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.601488] env[61648]: DEBUG nova.compute.manager [-] [instance: db35b417-bcdb-4380-927a-f755e6421624] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.601579] env[61648]: DEBUG nova.network.neutron [-] [instance: db35b417-bcdb-4380-927a-f755e6421624] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.628594] env[61648]: DEBUG nova.network.neutron [-] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.634764] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 848.655985] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068175} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.658430] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.660765] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d36c93-d1ae-4833-bfc9-51eb663d9d17 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.666758] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.667084] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.667383] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.667483] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 848.667986] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.668213] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.668471] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.669685] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.669685] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.669685] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.669685] env[61648]: DEBUG nova.virt.hardware [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.670477] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c19b50-b492-44fa-8537-7492d7aac672 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.687274] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a1fbf5-f297-43e3-abb8-09ea09f16d48 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.700587] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.700941] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe7ae3fc-ffda-443e-ae5d-0c40628b350b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.731383] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 848.731383] env[61648]: value = "task-1336694" [ 848.731383] env[61648]: _type = "Task" [ 848.731383] env[61648]: } to complete. 
{{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.738587] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336694, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.974822] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.115018] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.131478] env[61648]: DEBUG nova.network.neutron [-] [instance: db35b417-bcdb-4380-927a-f755e6421624] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.206370] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82cb5aa-23fd-4037-bf63-c97a6e7db657 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.214872] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0764f57-fe12-4865-a688-835ae13ae3b3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.260305] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac31aa59-c967-495d-8b6e-1c0bf69a6a97 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.268731] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336694, 'name': ReconfigVM_Task, 'duration_secs': 0.437317} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.271391] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Reconfigured VM instance instance-00000044 to attach disk [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa/acc5b6cb-16ee-4756-9088-fa094eb83daa.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.272703] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3972e114-9f0d-4b19-8c3d-927ce1acd8a4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.275706] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6f355a-246f-412f-b791-0444f4e3d93b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.295229] env[61648]: DEBUG nova.compute.provider_tree [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.298192] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 849.298192] env[61648]: value = "task-1336695" [ 849.298192] env[61648]: _type = "Task" [ 849.298192] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.311036] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336695, 'name': Rename_Task} progress is 14%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.364738] env[61648]: ERROR nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. 
[ 849.364738] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.364738] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 849.364738] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 849.364738] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.364738] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.364738] env[61648]: ERROR nova.compute.manager raise self.value [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 849.364738] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 849.364738] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.364738] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 849.365191] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 849.365191] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 849.365191] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. 
[ 849.365191] env[61648]: ERROR nova.compute.manager [ 849.365191] env[61648]: Traceback (most recent call last): [ 849.365191] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 849.365191] env[61648]: listener.cb(fileno) [ 849.365191] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.365191] env[61648]: result = function(*args, **kwargs) [ 849.365191] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 849.365191] env[61648]: return func(*args, **kwargs) [ 849.365191] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 849.365191] env[61648]: raise e [ 849.365191] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.365191] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 849.365191] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 849.365191] env[61648]: created_port_ids = self._update_ports_for_instance( [ 849.365191] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 849.365191] env[61648]: with excutils.save_and_reraise_exception(): [ 849.365191] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.365191] env[61648]: self.force_reraise() [ 849.365191] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.365191] env[61648]: raise self.value [ 849.365191] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 849.365191] env[61648]: updated_port = self._update_port( [ 849.365191] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.365191] env[61648]: _ensure_no_port_binding_failure(port) [ 849.365191] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 849.365191] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 849.365993] env[61648]: nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. [ 849.365993] env[61648]: Removing descriptor: 16 [ 849.365993] env[61648]: ERROR nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. 
[ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Traceback (most recent call last): [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] yield resources [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.driver.spawn(context, instance, image_meta, [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 849.365993] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] vm_ref = self.build_virtual_machine(instance, [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] vif_infos = vmwarevif.get_vif_info(self._session, [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] for vif in network_info: [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self._sync_wrapper(fn, *args, **kwargs) [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.wait() [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self[:] = self._gt.wait() [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self._exit_event.wait() [ 849.366427] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 849.366757] env[61648]: ERROR 
nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] result = hub.switch() [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self.greenlet.switch() [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] result = function(*args, **kwargs) [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return func(*args, **kwargs) [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise e [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] nwinfo = self.network_api.allocate_for_instance( [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 849.366757] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] created_port_ids = self._update_ports_for_instance( [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] with excutils.save_and_reraise_exception(): [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.force_reraise() [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise self.value [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] updated_port = self._update_port( [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.367072] 
env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] _ensure_no_port_binding_failure(port) [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 849.367072] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise exception.PortBindingFailed(port_id=port['id']) [ 849.367836] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. [ 849.367836] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] [ 849.367836] env[61648]: INFO nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Terminating instance [ 849.368546] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.368745] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.368933] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.616214] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Releasing lock "refresh_cache-acbab424-c325-4e57-81a2-3d4a1ae4a081" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.616214] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 849.616214] env[61648]: DEBUG nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 849.616214] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.629536] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.639807] env[61648]: INFO nova.compute.manager [-] [instance: db35b417-bcdb-4380-927a-f755e6421624] Took 1.04 seconds to deallocate network for instance. [ 849.640359] env[61648]: DEBUG nova.compute.claims [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 849.640359] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.799979] env[61648]: DEBUG nova.scheduler.client.report [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.814496] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336695, 'name': Rename_Task, 'duration_secs': 0.135155} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.814496] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.814496] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c92f72dd-9087-4925-9ee3-0fb50b0b15b4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.819733] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 849.819733] env[61648]: value = "task-1336696" [ 849.819733] env[61648]: _type = "Task" [ 849.819733] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.827804] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.843408] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "e4adb624-e900-4838-a5c5-2cd0d488f458" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.843627] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.894358] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.015841] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.134500] env[61648]: DEBUG nova.network.neutron [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.308339] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.868s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.308983] env[61648]: ERROR nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Traceback (most recent call last): [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.driver.spawn(context, instance, image_meta, [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] vm_ref = self.build_virtual_machine(instance, [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] vif_infos = vmwarevif.get_vif_info(self._session, [ 850.308983] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] for vif in network_info: [ 850.309290] 
env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return self._sync_wrapper(fn, *args, **kwargs) [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.wait() [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self[:] = self._gt.wait() [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return self._exit_event.wait() [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] current.throw(*self._exc) [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 850.309290] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] result = function(*args, **kwargs) [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] return func(*args, **kwargs) [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise e [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] nwinfo = self.network_api.allocate_for_instance( [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] created_port_ids = self._update_ports_for_instance( [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] with excutils.save_and_reraise_exception(): [ 850.309723] env[61648]: ERROR nova.compute.manager 
[instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] self.force_reraise() [ 850.309723] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise self.value [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] updated_port = self._update_port( [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] _ensure_no_port_binding_failure(port) [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] raise exception.PortBindingFailed(port_id=port['id']) [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] nova.exception.PortBindingFailed: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. [ 850.310042] env[61648]: ERROR nova.compute.manager [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] [ 850.310042] env[61648]: DEBUG nova.compute.utils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 850.311595] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Build of instance e199f125-9259-4268-9aaf-1f4d10da9a34 was re-scheduled: Binding failed for port d69c5b1f-7caa-4eec-b39d-af557f363d10, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 850.312021] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 850.312253] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquiring lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.312397] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Acquired lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.312552] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.314035] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.680s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.335867] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336696, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.350983] env[61648]: DEBUG nova.compute.manager [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Received event network-changed-cab8fc31-5619-4956-a775-42241810c27c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.351185] env[61648]: DEBUG nova.compute.manager [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Refreshing instance network info cache due to event network-changed-cab8fc31-5619-4956-a775-42241810c27c. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.353521] env[61648]: DEBUG oslo_concurrency.lockutils [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] Acquiring lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.520917] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.521301] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.521515] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.521832] env[61648]: DEBUG oslo_concurrency.lockutils [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] Acquired lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.522009] env[61648]: DEBUG nova.network.neutron [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Refreshing network info cache for port cab8fc31-5619-4956-a775-42241810c27c {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.523504] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eced241e-a228-4434-9824-5a7f24dd36ad {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.542054] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8409d2-623c-44ad-b77f-e0b780fda39c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.569574] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa8fb674-60e3-431c-b8c3-9cc548965e18 could not be found. 
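[editor's note] The "Acquiring lock"/"Acquired lock" entries above around refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18 show both the compute manager and the external-event handler serializing network-info cache refreshes behind a named oslo.concurrency lock. Below is a minimal, hedged sketch of that named-lock pattern only; the lock name mirrors the log, but fetch_network_info() and the module-level cache dict are hypothetical stand-ins, not Nova internals.

```python
# Minimal sketch of the "refresh_cache-<instance uuid>" named-lock pattern
# visible in the lockutils entries above. fetch_network_info() and
# _nw_info_cache are hypothetical placeholders, not Nova code.
from oslo_concurrency import lockutils

_nw_info_cache = {}  # hypothetical: instance uuid -> network_info list


def refresh_instance_cache(instance_uuid, fetch_network_info):
    # Serialize concurrent refreshes of one instance's cache, the way the
    # log shows "Acquiring"/"Acquired"/"Releasing lock refresh_cache-<uuid>"
    # around every update_instance_cache_with_nw_info call.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        nw_info = fetch_network_info(instance_uuid)  # may be [] as in the log
        _nw_info_cache[instance_uuid] = nw_info
        return nw_info
```

This is only meant to make the lock choreography in the surrounding records easier to follow; the real refresh path lives in nova/network/neutron.py as referenced by the log lines themselves.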
[ 850.569825] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.570681] env[61648]: INFO nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Took 0.05 seconds to destroy the instance on the hypervisor. [ 850.571211] env[61648]: DEBUG oslo.service.loopingcall [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.571540] env[61648]: DEBUG nova.compute.manager [-] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.571646] env[61648]: DEBUG nova.network.neutron [-] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.593489] env[61648]: DEBUG nova.network.neutron [-] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.636471] env[61648]: INFO nova.compute.manager [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] [instance: acbab424-c325-4e57-81a2-3d4a1ae4a081] Took 1.02 seconds to deallocate network for instance. [ 850.657977] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquiring lock "b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.658239] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.833483] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.848010] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336696, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.940718] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.050189] env[61648]: DEBUG nova.network.neutron [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.097102] env[61648]: DEBUG nova.network.neutron [-] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.139690] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f592bc12-696d-4210-895d-197a7d803fb7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.148989] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3278b6-ded6-4aca-be3f-943dcea871f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.193978] env[61648]: DEBUG nova.network.neutron [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.197405] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5399e378-160d-45e4-9d08-5157c317fe3c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.209057] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc4b01b-d848-4a64-9c76-4c4aa1cb8a85 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.231759] env[61648]: DEBUG nova.compute.provider_tree [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.334718] env[61648]: DEBUG oslo_vmware.api [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336696, 'name': PowerOnVM_Task, 'duration_secs': 1.256117} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.334996] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.335215] env[61648]: DEBUG nova.compute.manager [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.336068] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9aef43-f9b4-4774-bc74-f48524745f33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.447723] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Releasing lock "refresh_cache-e199f125-9259-4268-9aaf-1f4d10da9a34" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.447984] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 851.448188] env[61648]: DEBUG nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.448475] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.467480] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.599348] env[61648]: INFO nova.compute.manager [-] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Took 1.03 seconds to deallocate network for instance. 
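[editor's note] The ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above (progress 0% ... 64% ... "completed successfully") follow one pattern: submit a vCenter task, then poll its state until it succeeds or errors. The sketch below is a rough, generic illustration of that poll-until-complete loop under stated assumptions; get_task_info(), the 0.5 s interval and the TaskFailed exception are hypothetical placeholders, not the oslo.vmware API or its implementation.

```python
# Rough sketch of the poll loop suggested by the wait_for_task/_poll_task
# entries above. get_task_info() is a hypothetical callable returning e.g.
# {'state': 'running', 'progress': 64}; nothing here is oslo.vmware code.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error or times out."""


def wait_for_task(task_ref, get_task_info, poll_interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info            # log: "Task: {...} completed successfully."
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'task failed'))
        # log: "Task: {'id': ..., 'name': 'PowerOnVM_Task'} progress is N%."
        time.sleep(poll_interval)
    raise TaskFailed('timed out waiting for %s' % task_ref)
```

Duration fields in the log (e.g. 'duration_secs': 1.256117 for PowerOnVM_Task) are what such a loop would report once the final poll sees the success state.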
[ 851.601820] env[61648]: DEBUG nova.compute.claims [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 851.602030] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.702481] env[61648]: DEBUG oslo_concurrency.lockutils [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] Releasing lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.702741] env[61648]: DEBUG nova.compute.manager [req-d28f892d-aaed-48ba-b818-07b3df6dbeb1 req-448ea0d5-5987-473b-9038-592fa5edf17d service nova] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Received event network-vif-deleted-cab8fc31-5619-4956-a775-42241810c27c {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.713869] env[61648]: INFO nova.scheduler.client.report [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Deleted allocations for instance acbab424-c325-4e57-81a2-3d4a1ae4a081 [ 851.735632] env[61648]: DEBUG nova.scheduler.client.report [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.853922] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.974120] env[61648]: DEBUG nova.network.neutron [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.223407] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e8f61ec9-1a4e-4f0f-b4bd-5e9a55ff767f tempest-ServerActionsTestOtherB-637326756 tempest-ServerActionsTestOtherB-637326756-project-member] Lock "acbab424-c325-4e57-81a2-3d4a1ae4a081" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.562s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.241650] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.927s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.241650] env[61648]: ERROR nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Traceback (most recent call last): [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.driver.spawn(context, instance, image_meta, [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 852.241650] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] vm_ref = self.build_virtual_machine(instance, [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] for vif in network_info: [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return self._sync_wrapper(fn, *args, **kwargs) [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.wait() [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: 
c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self[:] = self._gt.wait() [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return self._exit_event.wait() [ 852.245948] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] current.throw(*self._exc) [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] result = function(*args, **kwargs) [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] return func(*args, **kwargs) [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise e [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] nwinfo = self.network_api.allocate_for_instance( [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] created_port_ids = self._update_ports_for_instance( [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 852.246330] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] with excutils.save_and_reraise_exception(): [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] self.force_reraise() [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise self.value [ 852.246680] env[61648]: ERROR nova.compute.manager 
[instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] updated_port = self._update_port( [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] _ensure_no_port_binding_failure(port) [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] raise exception.PortBindingFailed(port_id=port['id']) [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] nova.exception.PortBindingFailed: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. [ 852.246680] env[61648]: ERROR nova.compute.manager [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] [ 852.247040] env[61648]: DEBUG nova.compute.utils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 852.247040] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.537s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.247040] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.247040] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 852.247040] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.637s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.247219] env[61648]: INFO nova.compute.claims [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.248360] env[61648]: DEBUG 
nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Build of instance c399cd8d-6cad-43d8-9226-36f9d9c247e3 was re-scheduled: Binding failed for port e27c0277-ae98-467e-98a7-d62f16c67fcc, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 852.248761] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 852.248986] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.249533] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.249533] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.254019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d0c758-78e8-4719-b100-505b596f5f9f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.260535] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35037bfe-094e-4d2a-8f52-26f1531ad9af {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.275898] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54abb1e8-2622-42c5-8ac6-39da03d4e43d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.282600] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28ebe59-676c-4ee7-9f85-5e5af6254857 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.311625] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181427MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 852.311799] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.476572] env[61648]: INFO nova.compute.manager [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] [instance: e199f125-9259-4268-9aaf-1f4d10da9a34] Took 1.03 seconds to deallocate network for instance. [ 852.728534] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 852.777199] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.826419] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.826687] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.826891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "acc5b6cb-16ee-4756-9088-fa094eb83daa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.827093] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.827314] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.829741] env[61648]: INFO nova.compute.manager [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Terminating instance [ 852.831353] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "refresh_cache-acc5b6cb-16ee-4756-9088-fa094eb83daa" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.831555] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "refresh_cache-acc5b6cb-16ee-4756-9088-fa094eb83daa" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.831740] env[61648]: DEBUG nova.network.neutron [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.895551] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.259719] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.356160] env[61648]: DEBUG nova.network.neutron [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.398407] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-c399cd8d-6cad-43d8-9226-36f9d9c247e3" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.398656] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 853.398835] env[61648]: DEBUG nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 853.398998] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.422678] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.428144] env[61648]: DEBUG nova.network.neutron [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.508803] env[61648]: INFO nova.scheduler.client.report [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Deleted allocations for instance e199f125-9259-4268-9aaf-1f4d10da9a34 [ 853.528231] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5548d123-a44f-413e-b35a-5e20e665a363 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.536267] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866e2062-b2f9-42a8-9942-e03ab3e2eaa2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.569017] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286ae66c-fb1a-4950-a3b2-dc8a75ad111d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.576759] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f762ac95-5cc2-4b20-b80e-cfcb9bd02f5b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.591067] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.926189] env[61648]: DEBUG nova.network.neutron [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 
c399cd8d-6cad-43d8-9226-36f9d9c247e3] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.927774] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "refresh_cache-acc5b6cb-16ee-4756-9088-fa094eb83daa" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.928181] env[61648]: DEBUG nova.compute.manager [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 853.928413] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.929437] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f6a5a3-2d3a-4bfc-874d-886e63acc03e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.937943] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.937943] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47020b3b-5497-45b1-a1c4-2fd4a59b419d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.944288] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 853.944288] env[61648]: value = "task-1336697" [ 853.944288] env[61648]: _type = "Task" [ 853.944288] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.960274] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336697, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.019176] env[61648]: DEBUG oslo_concurrency.lockutils [None req-4a6346c9-1c3b-4262-b3ce-5d7f31ecd137 tempest-ServerRescueTestJSONUnderV235-1445137845 tempest-ServerRescueTestJSONUnderV235-1445137845-project-member] Lock "e199f125-9259-4268-9aaf-1f4d10da9a34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.651s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.094898] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.430364] env[61648]: INFO nova.compute.manager [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: c399cd8d-6cad-43d8-9226-36f9d9c247e3] Took 1.03 seconds to deallocate network for instance. [ 854.455895] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336697, 'name': PowerOffVM_Task, 'duration_secs': 0.186491} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.456572] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.457650] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.458009] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ab7a3f7-82ef-4eb4-a588-21696c151d4b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.487203] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.487483] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.487674] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleting the datastore file [datastore2] acc5b6cb-16ee-4756-9088-fa094eb83daa {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.487927] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7127a6b5-8d52-4489-bb08-c293235fc5ee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.495164] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 854.495164] env[61648]: value = "task-1336699" [ 854.495164] env[61648]: _type = "Task" [ 854.495164] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.504506] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336699, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.523913] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 854.599908] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.601496] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 854.605377] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.356s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.017020] env[61648]: DEBUG oslo_vmware.api [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098733} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.017020] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.017020] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.017020] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.017020] env[61648]: INFO nova.compute.manager [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Took 1.09 seconds to destroy the instance on the hypervisor. [ 855.017544] env[61648]: DEBUG oslo.service.loopingcall [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.017544] env[61648]: DEBUG nova.compute.manager [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.017544] env[61648]: DEBUG nova.network.neutron [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.040938] env[61648]: DEBUG nova.network.neutron [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.057672] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.114937] env[61648]: DEBUG nova.compute.utils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.127168] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 855.127168] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.199440] env[61648]: DEBUG nova.policy [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 855.367347] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a26f5a-2e60-452a-a250-5cdb5f2b1bc6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.374945] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ef852b-a5ce-41a6-8b95-68ce83a5ec66 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.407071] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b44ca0f-57fe-4bb6-bf80-4b7f91e93129 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.414730] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c9c70e-7dae-4c2b-b9f0-70a717d039a6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.429077] env[61648]: DEBUG nova.compute.provider_tree [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.477709] env[61648]: INFO nova.scheduler.client.report [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Deleted allocations for instance c399cd8d-6cad-43d8-9226-36f9d9c247e3 [ 855.548739] env[61648]: DEBUG nova.network.neutron [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.624630] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 855.632462] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Successfully created port: f89097cb-f6da-490f-bf27-bf292cbddd33 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.932766] env[61648]: DEBUG nova.scheduler.client.report [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.993229] env[61648]: DEBUG oslo_concurrency.lockutils [None req-93f68824-6940-4a62-94a3-10696222985a tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "c399cd8d-6cad-43d8-9226-36f9d9c247e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.475s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.052377] env[61648]: INFO nova.compute.manager [-] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Took 1.04 seconds to deallocate network for instance. [ 856.440089] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.835s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.441469] env[61648]: ERROR nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. 
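The recurring "Inventory has not changed for provider ..." records carry the full Placement inventory payload for this compute node. The capacity the scheduler works with is effectively (total - reserved) * allocation_ratio per resource class, while min_unit/max_unit/step_size only constrain individual allocations. A small self-contained illustration using the exact figures logged for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0:

# Illustration only: effective capacity implied by the inventory payload
# repeated in the scheduler report-client records above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {capacity:g}')
# VCPU: 192        (48 physical * 4.0 overcommit)
# MEMORY_MB: 196078
# DISK_GB: 400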
[ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Traceback (most recent call last): [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.driver.spawn(context, instance, image_meta, [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] vm_ref = self.build_virtual_machine(instance, [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 856.441469] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] for vif in network_info: [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return self._sync_wrapper(fn, *args, **kwargs) [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.wait() [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self[:] = self._gt.wait() [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return self._exit_event.wait() [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] current.throw(*self._exc) [ 856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
856.441832] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] result = function(*args, **kwargs) [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] return func(*args, **kwargs) [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise e [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] nwinfo = self.network_api.allocate_for_instance( [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] created_port_ids = self._update_ports_for_instance( [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] with excutils.save_and_reraise_exception(): [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] self.force_reraise() [ 856.442153] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise self.value [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] updated_port = self._update_port( [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] _ensure_no_port_binding_failure(port) [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] raise exception.PortBindingFailed(port_id=port['id']) [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] nova.exception.PortBindingFailed: Binding failed for 
port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. [ 856.442457] env[61648]: ERROR nova.compute.manager [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] [ 856.442457] env[61648]: DEBUG nova.compute.utils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 856.443860] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.038s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.446625] env[61648]: INFO nova.compute.claims [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.449507] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Build of instance 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7 was re-scheduled: Binding failed for port dd20c3c3-c478-4b1a-ad58-2a04e64bbe15, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 856.450513] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 856.450840] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.451169] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquired lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.451368] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.498483] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 856.511163] env[61648]: DEBUG nova.compute.manager [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Received event network-changed-f89097cb-f6da-490f-bf27-bf292cbddd33 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.511351] env[61648]: DEBUG nova.compute.manager [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Refreshing instance network info cache due to event network-changed-f89097cb-f6da-490f-bf27-bf292cbddd33. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 856.511554] env[61648]: DEBUG oslo_concurrency.lockutils [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] Acquiring lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.511693] env[61648]: DEBUG oslo_concurrency.lockutils [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] Acquired lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.511846] env[61648]: DEBUG nova.network.neutron [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Refreshing network info cache for port f89097cb-f6da-490f-bf27-bf292cbddd33 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.559245] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.641920] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 856.679129] env[61648]: ERROR nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
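Nearly every step above is bracketed by oslo.concurrency records ("Acquiring lock ... by ...", "Lock ... acquired ... waited", 'Lock ... "released" ... held') emitted from lockutils.py. The following is a minimal sketch of the two usage forms that produce them; the lock names and the decorated function are illustrative only, not Nova's code.

# Sketch of the oslo.concurrency usage behind the acquire/release records
# above. Lock names and the function body are illustrative.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')     # decorator form; logs waited/held times
def update_usage():
    # Critical section: callers are serialized on the in-process lock.
    pass

# Context-manager form, as used for the per-instance refresh_cache-<uuid> locks.
with lockutils.lock('refresh_cache-example'):
    pass

update_usage()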
[ 856.679129] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 856.679129] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 856.679129] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 856.679129] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.679129] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.679129] env[61648]: ERROR nova.compute.manager raise self.value [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 856.679129] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 856.679129] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.679129] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 856.679687] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.679687] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 856.679687] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
[ 856.679687] env[61648]: ERROR nova.compute.manager [ 856.679687] env[61648]: Traceback (most recent call last): [ 856.679687] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 856.679687] env[61648]: listener.cb(fileno) [ 856.679687] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 856.679687] env[61648]: result = function(*args, **kwargs) [ 856.679687] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 856.679687] env[61648]: return func(*args, **kwargs) [ 856.679687] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 856.679687] env[61648]: raise e [ 856.679687] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 856.679687] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 856.679687] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 856.679687] env[61648]: created_port_ids = self._update_ports_for_instance( [ 856.679687] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 856.679687] env[61648]: with excutils.save_and_reraise_exception(): [ 856.679687] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.679687] env[61648]: self.force_reraise() [ 856.679687] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.679687] env[61648]: raise self.value [ 856.679687] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 856.679687] env[61648]: updated_port = self._update_port( [ 856.679687] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.679687] env[61648]: _ensure_no_port_binding_failure(port) [ 856.679687] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.679687] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 856.680665] env[61648]: nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
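Both tracebacks above bottom out in nova/network/neutron.py line 294, _ensure_no_port_binding_failure. As a minimal illustrative sketch (not the verbatim Nova source), the check amounts to reading the binding:vif_type that Neutron returned for the port and raising PortBindingFailed when the bind attempt came back as 'binding_failed':

    # Simplified stand-in for the helper named in the traceback frames
    # (nova/network/neutron.py, _ensure_no_port_binding_failure). The real
    # code uses nova.exception.PortBindingFailed and nova.network.model.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports on a failed bind

    def ensure_no_port_binding_failure(port):
        # Neutron records the outcome of the bind attempt in binding:vif_type;
        # 'binding_failed' means no mechanism driver could bind the port on
        # the requested host, so the port is unusable for a VIF.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port payload of roughly this shape would trip the check:
    try:
        ensure_no_port_binding_failure({
            'id': 'f89097cb-f6da-490f-bf27-bf292cbddd33',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR lines above

The failure itself is on the Neutron side (as the message says, check the neutron logs); the compute manager reacts by aborting the build, which is what the "Terminating instance" and network deallocation lines that follow show.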
[ 856.680665] env[61648]: Removing descriptor: 16 [ 856.682219] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.682535] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.684405] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.684405] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.684767] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.684767] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.684913] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.685069] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.685245] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 
tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.685409] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.686183] env[61648]: DEBUG nova.virt.hardware [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.686470] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0246ffc6-2686-45da-aaab-9fafc002457e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.696845] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e8f2b-2cae-434e-a19e-bee6573ea4ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.712669] env[61648]: ERROR nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
[ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Traceback (most recent call last): [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] yield resources [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.driver.spawn(context, instance, image_meta, [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] vm_ref = self.build_virtual_machine(instance, [ 856.712669] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] for vif in network_info: [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return self._sync_wrapper(fn, *args, **kwargs) [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.wait() [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self[:] = self._gt.wait() [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return self._exit_event.wait() [ 856.713069] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 856.713069] env[61648]: ERROR 
nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] current.throw(*self._exc) [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] result = function(*args, **kwargs) [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return func(*args, **kwargs) [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise e [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] nwinfo = self.network_api.allocate_for_instance( [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] created_port_ids = self._update_ports_for_instance( [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] with excutils.save_and_reraise_exception(): [ 856.713377] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.force_reraise() [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise self.value [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] updated_port = self._update_port( [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] _ensure_no_port_binding_failure(port) [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise exception.PortBindingFailed(port_id=port['id']) [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. [ 856.713703] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] [ 856.713703] env[61648]: INFO nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Terminating instance [ 856.714987] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.975863] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.025425] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.035672] env[61648]: DEBUG nova.network.neutron [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.041259] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.172852] env[61648]: DEBUG nova.network.neutron [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.543854] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Releasing lock "refresh_cache-1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.543991] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 857.544637] env[61648]: DEBUG nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 857.544637] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 857.560854] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.675627] env[61648]: DEBUG oslo_concurrency.lockutils [req-7e293dcc-5ff9-4213-9d36-037fed46640c req-98a09e29-243e-491e-bcfc-ea959fb2934d service nova] Releasing lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.676983] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.676983] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.687486] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d105805-b5a9-4670-a809-a4bc66fe524c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.696716] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b2a33d-c08f-4b92-a113-c12ac43c5aee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.731474] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220b7e55-f4a6-4a1f-9aa4-b8a0bfa395d6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.736559] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1b5e32-7820-40fa-a2fe-44d82fd9aa52 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.754022] env[61648]: DEBUG nova.compute.provider_tree [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.067584] env[61648]: DEBUG nova.network.neutron [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.203797] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.256261] env[61648]: DEBUG nova.scheduler.client.report [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.301909] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.545984] env[61648]: DEBUG nova.compute.manager [req-c61ba062-090f-4c5c-9861-317ef3b499a7 req-32252872-f4d1-4560-aaa2-c602f5034707 service nova] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Received event network-vif-deleted-f89097cb-f6da-490f-bf27-bf292cbddd33 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.572896] env[61648]: INFO nova.compute.manager [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7] Took 1.03 seconds to deallocate network for instance. 
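The inventory payload logged at 858.256 is the full capacity picture Placement has for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. As a small worked example (using the standard Placement capacity formula, (total - reserved) * allocation_ratio; the dict below just copies the numbers from that log line):

    # Effective schedulable capacity implied by the logged inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'max per allocation:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

max_unit additionally caps any single allocation (16 vCPUs, 65530 MB, 155 GB here), independent of how much total capacity remains free.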
[ 858.671452] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "3002571b-4800-48a9-84c1-68f6d3e0cc70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.671452] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "3002571b-4800-48a9-84c1-68f6d3e0cc70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.766098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.766098] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 858.767496] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.911s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.769737] env[61648]: INFO nova.compute.claims [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.807448] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.807914] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 858.808144] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.808456] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-086cd4b4-8dfc-4be6-a222-1c376103a79d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.826194] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9d06ba-59a7-431d-a50a-cb95532e50b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.851880] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb could not be found. [ 858.852648] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.854021] env[61648]: INFO nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 858.854021] env[61648]: DEBUG oslo.service.loopingcall [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.854021] env[61648]: DEBUG nova.compute.manager [-] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.854021] env[61648]: DEBUG nova.network.neutron [-] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.871723] env[61648]: DEBUG nova.network.neutron [-] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.277550] env[61648]: DEBUG nova.compute.utils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.283854] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 859.284048] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.346234] env[61648]: DEBUG nova.policy [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '787d9130823549909ab3df06868bb3af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2154e6782ba43d1b1304d2b07ce91a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 859.373107] env[61648]: DEBUG nova.network.neutron [-] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.615493] env[61648]: INFO nova.scheduler.client.report [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Deleted allocations for instance 1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7 [ 859.770346] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Successfully created port: ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.783989] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 859.875946] env[61648]: INFO nova.compute.manager [-] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Took 1.02 seconds to deallocate network for instance. 
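The lockutils DEBUG lines around here (for example the "compute_resources" lock released after being held 2.320s at 858.766, and acquired after waiting 13.911s at 858.767) come from oslo.concurrency's named-lock helpers, which serialize the resource tracker's claim and update paths. A minimal sketch of the pattern, assuming only that oslo.concurrency is importable from this venv (Nova itself goes through its own synchronized wrapper, with a lock-name prefix and fair locking, rather than calling lockutils directly like this):

    from oslo_concurrency import lockutils

    # Decorator form: the body runs with the named lock held, and lockutils'
    # inner wrapper emits the "acquired ... waited Ns" / "released ... held Ns"
    # DEBUG lines seen in this log.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # A 13.911s wait in the log means another claim/update held
        # this same lock for that long before this caller got in.
        pass

    # Context-manager form of the same named lock (the "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" lines correspond to this path).
    with lockutils.lock('compute_resources'):
        pass

    instance_claim()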
[ 859.878619] env[61648]: DEBUG nova.compute.claims [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 859.878844] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.031329] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d72e56a-3c1d-4502-8d46-48f998ab893c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.039091] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57579bcb-c032-49dc-9aac-bb6df70451e7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.074941] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1520e6de-b1a4-43fe-9800-241901478808 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.082797] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e89cbfa-a246-4c4c-a804-75cb9e259c99 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.096275] env[61648]: DEBUG nova.compute.provider_tree [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.127519] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0324731a-38b0-485a-8916-b8b080c1267e tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "1a78bab7-ee05-4f4c-bfea-8bf7aa13b7a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.148s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.521893] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquiring lock "8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.522208] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.599703] env[61648]: DEBUG nova.scheduler.client.report [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.615036] env[61648]: DEBUG nova.compute.manager [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Received event network-changed-ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.615036] env[61648]: DEBUG nova.compute.manager [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Refreshing instance network info cache due to event network-changed-ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 860.615036] env[61648]: DEBUG oslo_concurrency.lockutils [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] Acquiring lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.615036] env[61648]: DEBUG oslo_concurrency.lockutils [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] Acquired lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.615036] env[61648]: DEBUG nova.network.neutron [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Refreshing network info cache for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 860.631121] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 860.794136] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 860.813847] env[61648]: ERROR nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. [ 860.813847] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 860.813847] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 860.813847] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 860.813847] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 860.813847] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 860.813847] env[61648]: ERROR nova.compute.manager raise self.value [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 860.813847] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 860.813847] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 860.813847] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 860.814812] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 860.814812] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 860.814812] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. 
[ 860.814812] env[61648]: ERROR nova.compute.manager [ 860.814812] env[61648]: Traceback (most recent call last): [ 860.814812] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 860.814812] env[61648]: listener.cb(fileno) [ 860.814812] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 860.814812] env[61648]: result = function(*args, **kwargs) [ 860.814812] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 860.814812] env[61648]: return func(*args, **kwargs) [ 860.814812] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 860.814812] env[61648]: raise e [ 860.814812] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 860.814812] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 860.814812] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 860.814812] env[61648]: created_port_ids = self._update_ports_for_instance( [ 860.814812] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 860.814812] env[61648]: with excutils.save_and_reraise_exception(): [ 860.814812] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 860.814812] env[61648]: self.force_reraise() [ 860.814812] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 860.814812] env[61648]: raise self.value [ 860.814812] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 860.814812] env[61648]: updated_port = self._update_port( [ 860.814812] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 860.814812] env[61648]: _ensure_no_port_binding_failure(port) [ 860.814812] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 860.814812] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 860.816161] env[61648]: nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. 
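Both halves of this traceback also pass through oslo_utils.excutils.save_and_reraise_exception (the __exit__ and force_reraise frames): _update_ports_for_instance uses it so that port cleanup can run while the original error still propagates. A minimal sketch of that usage pattern, assuming only that oslo.utils is importable; the update and cleanup callables here are stand-ins, not Nova's:

    from oslo_utils import excutils

    def update_port_or_cleanup(update_port, cleanup):
        try:
            return update_port()
        except Exception:
            # save_and_reraise_exception captures the in-flight exception,
            # lets the body do cleanup, and re-raises the original on exit
            # (setting ctxt.reraise = False inside the block would swallow it).
            with excutils.save_and_reraise_exception():
                cleanup()

    def failing_update():
        raise RuntimeError('port update failed')

    try:
        update_port_or_cleanup(failing_update, cleanup=lambda: print('cleaned up'))
    except RuntimeError as exc:
        print('original error re-raised:', exc)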
[ 860.816161] env[61648]: Removing descriptor: 19 [ 860.822354] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 860.822588] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 860.822741] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.822972] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 860.823070] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.823220] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 860.823422] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 860.823663] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 860.823862] env[61648]: DEBUG 
nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 860.824034] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 860.824206] env[61648]: DEBUG nova.virt.hardware [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.825051] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc91a9e-5cf3-4783-b4de-3ac16ffa796b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.833262] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e45587e-4125-4915-965a-3395c19cd6bf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.847370] env[61648]: ERROR nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. 
[ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Traceback (most recent call last): [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] yield resources [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.driver.spawn(context, instance, image_meta, [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] vm_ref = self.build_virtual_machine(instance, [ 860.847370] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] vif_infos = vmwarevif.get_vif_info(self._session, [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] for vif in network_info: [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return self._sync_wrapper(fn, *args, **kwargs) [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.wait() [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self[:] = self._gt.wait() [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return self._exit_event.wait() [ 860.847949] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 860.847949] env[61648]: ERROR 
nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] current.throw(*self._exc) [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] result = function(*args, **kwargs) [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return func(*args, **kwargs) [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise e [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] nwinfo = self.network_api.allocate_for_instance( [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] created_port_ids = self._update_ports_for_instance( [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] with excutils.save_and_reraise_exception(): [ 860.848325] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.force_reraise() [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise self.value [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] updated_port = self._update_port( [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] _ensure_no_port_binding_failure(port) [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise exception.PortBindingFailed(port_id=port['id']) [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. [ 860.848672] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] [ 860.848672] env[61648]: INFO nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Terminating instance [ 860.849963] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.108346] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.109556] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.392s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.111177] env[61648]: INFO nova.compute.claims [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.135184] env[61648]: DEBUG nova.network.neutron [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.173506] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.261839] env[61648]: DEBUG nova.network.neutron [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.615858] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "596d8dd6-0220-4092-88d4-976cbe1115a4" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.616218] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "596d8dd6-0220-4092-88d4-976cbe1115a4" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.765942] env[61648]: DEBUG oslo_concurrency.lockutils [req-f76111cc-30ce-45a0-8c62-f22a8a026003 req-2b994e54-40f0-457c-b2a6-4ba2d5ddaca6 service nova] Releasing lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.765942] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquired lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.765942] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.127928] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "596d8dd6-0220-4092-88d4-976cbe1115a4" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.512s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.128594] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] 
Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 862.284670] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.308374] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07d5f16-335c-4f16-9f6f-7654c2c586b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.316500] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e47d83-6eff-41dd-839e-bb7917a3a3f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.346693] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daacdbcb-1ce8-4f44-98e2-5ef6839c65e1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.353886] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a1cdd0-2516-445d-bc38-a2d4f54bab8b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.366357] env[61648]: DEBUG nova.compute.provider_tree [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.377887] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.633800] env[61648]: DEBUG nova.compute.utils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.635635] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.635850] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.655216] env[61648]: DEBUG nova.compute.manager [req-087ec1c5-3dcb-4362-a991-70565b73da6c req-eec1816f-acf8-4c11-ae8f-f12896649aef service nova] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Received event network-vif-deleted-ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.679979] env[61648]: DEBUG nova.policy [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c7d13d555b744a19ecf587e507e9d28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd248b7727db44926be0a94cccae0e4f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.870636] env[61648]: DEBUG nova.scheduler.client.report [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.879906] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Releasing lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.880330] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 862.880525] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.880826] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cfdbfcc-1bb2-4bf8-9305-abc7f347efec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.892346] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef6f701-435a-4667-8fda-2be16f5d98c0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.916183] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b3ae23d8-e28e-460c-b9a3-4744f81f39ec could not be found. [ 862.916408] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.916585] env[61648]: INFO nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Took 0.04 seconds to destroy the instance on the hypervisor. [ 862.916820] env[61648]: DEBUG oslo.service.loopingcall [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.917045] env[61648]: DEBUG nova.compute.manager [-] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.917142] env[61648]: DEBUG nova.network.neutron [-] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.946464] env[61648]: DEBUG nova.network.neutron [-] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.961276] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Successfully created port: a325a575-6070-44f4-b327-2ea5459bfea9 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.139330] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.376239] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.377206] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 863.383133] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.743s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.450706] env[61648]: DEBUG nova.network.neutron [-] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.884707] env[61648]: ERROR nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. 
[ 863.884707] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.884707] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 863.884707] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 863.884707] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.884707] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.884707] env[61648]: ERROR nova.compute.manager raise self.value [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 863.884707] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 863.884707] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.884707] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 863.885407] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.885407] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 863.885407] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. 
[ 863.885407] env[61648]: ERROR nova.compute.manager [ 863.885407] env[61648]: Traceback (most recent call last): [ 863.885407] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 863.885407] env[61648]: listener.cb(fileno) [ 863.885407] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 863.885407] env[61648]: result = function(*args, **kwargs) [ 863.885407] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 863.885407] env[61648]: return func(*args, **kwargs) [ 863.885407] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 863.885407] env[61648]: raise e [ 863.885407] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.885407] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 863.885407] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 863.885407] env[61648]: created_port_ids = self._update_ports_for_instance( [ 863.885407] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 863.885407] env[61648]: with excutils.save_and_reraise_exception(): [ 863.885407] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.885407] env[61648]: self.force_reraise() [ 863.885407] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.885407] env[61648]: raise self.value [ 863.885407] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 863.885407] env[61648]: updated_port = self._update_port( [ 863.885407] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.885407] env[61648]: _ensure_no_port_binding_failure(port) [ 863.885407] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.885407] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 863.886297] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. [ 863.886297] env[61648]: Removing descriptor: 19 [ 863.891861] env[61648]: DEBUG nova.compute.utils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.901087] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 863.901087] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.946691] env[61648]: DEBUG nova.policy [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99aee7b8e206476f86165128d056b68c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f74c8585407422bbaab2c440dce9489', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.954454] env[61648]: INFO nova.compute.manager [-] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Took 1.04 seconds to deallocate network for instance. [ 863.956894] env[61648]: DEBUG nova.compute.claims [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 863.957030] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.118553] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3efca04-61a7-456a-9726-c76c76937bc0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.127653] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ed5e37-9442-4b72-8f1c-41d2b54ee8e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.161261] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.164530] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ae7735-1ff1-46ab-80b2-84fae227f7c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.171546] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20f3878-e549-4d8c-bbb3-c1f38be1343c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.185984] env[61648]: DEBUG nova.compute.provider_tree [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.197450] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.197701] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.197859] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.198062] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.198378] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.198578] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.198842] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.199029] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.199201] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.199377] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.199608] env[61648]: DEBUG nova.virt.hardware [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.200629] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a280a27-c1a0-4343-8d47-973e6ba9a924 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.208525] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32088354-03bf-40a6-ae5b-9106775e3e2e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.223363] env[61648]: ERROR nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. 
[ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Traceback (most recent call last): [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] yield resources [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.driver.spawn(context, instance, image_meta, [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self._vmops.spawn(context, instance, image_meta, injected_files, [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] vm_ref = self.build_virtual_machine(instance, [ 864.223363] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] vif_infos = vmwarevif.get_vif_info(self._session, [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] for vif in network_info: [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return self._sync_wrapper(fn, *args, **kwargs) [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.wait() [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self[:] = self._gt.wait() [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return self._exit_event.wait() [ 864.223697] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 864.223697] env[61648]: ERROR 
nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] current.throw(*self._exc) [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] result = function(*args, **kwargs) [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return func(*args, **kwargs) [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise e [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] nwinfo = self.network_api.allocate_for_instance( [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] created_port_ids = self._update_ports_for_instance( [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] with excutils.save_and_reraise_exception(): [ 864.224030] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.force_reraise() [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise self.value [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] updated_port = self._update_port( [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] _ensure_no_port_binding_failure(port) [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise exception.PortBindingFailed(port_id=port['id']) [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. [ 864.224346] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] [ 864.224346] env[61648]: INFO nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Terminating instance [ 864.225655] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.225825] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquired lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.225999] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.279554] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Successfully created port: a9db426e-3abb-46bd-864b-92ed5e4b6f9d {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.398586] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 864.684798] env[61648]: DEBUG nova.compute.manager [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Received event network-changed-a325a575-6070-44f4-b327-2ea5459bfea9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.684998] env[61648]: DEBUG nova.compute.manager [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Refreshing instance network info cache due to event network-changed-a325a575-6070-44f4-b327-2ea5459bfea9. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 864.685501] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] Acquiring lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.688650] env[61648]: DEBUG nova.scheduler.client.report [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.748369] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.828224] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.189945] env[61648]: ERROR nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. 
[ 865.189945] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 865.189945] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 865.189945] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 865.189945] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 865.189945] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 865.189945] env[61648]: ERROR nova.compute.manager raise self.value [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 865.189945] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 865.189945] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 865.189945] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 865.190737] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 865.190737] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 865.190737] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. 
[ 865.190737] env[61648]: ERROR nova.compute.manager [ 865.190737] env[61648]: Traceback (most recent call last): [ 865.190737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 865.190737] env[61648]: listener.cb(fileno) [ 865.190737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 865.190737] env[61648]: result = function(*args, **kwargs) [ 865.190737] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 865.190737] env[61648]: return func(*args, **kwargs) [ 865.190737] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 865.190737] env[61648]: raise e [ 865.190737] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 865.190737] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 865.190737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 865.190737] env[61648]: created_port_ids = self._update_ports_for_instance( [ 865.190737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 865.190737] env[61648]: with excutils.save_and_reraise_exception(): [ 865.190737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 865.190737] env[61648]: self.force_reraise() [ 865.190737] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 865.190737] env[61648]: raise self.value [ 865.190737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 865.190737] env[61648]: updated_port = self._update_port( [ 865.190737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 865.190737] env[61648]: _ensure_no_port_binding_failure(port) [ 865.190737] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 865.190737] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 865.191500] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. [ 865.191500] env[61648]: Removing descriptor: 19 [ 865.193400] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.810s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.194013] env[61648]: ERROR nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. 
[ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] Traceback (most recent call last): [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.driver.spawn(context, instance, image_meta, [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self._vmops.spawn(context, instance, image_meta, injected_files, [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] vm_ref = self.build_virtual_machine(instance, [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] vif_infos = vmwarevif.get_vif_info(self._session, [ 865.194013] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] for vif in network_info: [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self._sync_wrapper(fn, *args, **kwargs) [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.wait() [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self[:] = self._gt.wait() [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self._exit_event.wait() [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] result = hub.switch() [ 865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
865.194340] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return self.greenlet.switch() [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] result = function(*args, **kwargs) [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] return func(*args, **kwargs) [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise e [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] nwinfo = self.network_api.allocate_for_instance( [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] created_port_ids = self._update_ports_for_instance( [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] with excutils.save_and_reraise_exception(): [ 865.194705] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] self.force_reraise() [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise self.value [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] updated_port = self._update_port( [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] _ensure_no_port_binding_failure(port) [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] raise exception.PortBindingFailed(port_id=port['id']) [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] nova.exception.PortBindingFailed: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. [ 865.195109] env[61648]: ERROR nova.compute.manager [instance: db35b417-bcdb-4380-927a-f755e6421624] [ 865.195377] env[61648]: DEBUG nova.compute.utils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 865.196150] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.594s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.198961] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Build of instance db35b417-bcdb-4380-927a-f755e6421624 was re-scheduled: Binding failed for port eaee6d3c-9b6c-4c24-863e-7248ef491f8f, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 865.199397] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 865.199615] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquiring lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.199758] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Acquired lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.200617] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.331127] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Releasing lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.331397] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 865.331583] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.331884] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] Acquired lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.332061] env[61648]: DEBUG nova.network.neutron [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Refreshing network info cache for port a325a575-6070-44f4-b327-2ea5459bfea9 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.333106] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2c5dc89-0415-4c95-9f48-23d36497327c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.342793] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c438ba2-8902-4966-9a2f-dda7c63341d4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.366214] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 34dc9640-9b39-4e3b-b8ca-7a29ab760992 could not be found. [ 865.366435] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.366612] env[61648]: INFO nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Took 0.04 seconds to destroy the instance on the hypervisor. [ 865.366846] env[61648]: DEBUG oslo.service.loopingcall [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.367073] env[61648]: DEBUG nova.compute.manager [-] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 865.367168] env[61648]: DEBUG nova.network.neutron [-] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 865.382896] env[61648]: DEBUG nova.network.neutron [-] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.409798] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 865.440052] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.440305] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.440458] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.440636] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.440781] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.440995] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d 
tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.441293] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.441519] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.441746] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.441955] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.442170] env[61648]: DEBUG nova.virt.hardware [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.443034] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a56b69-d430-43a4-ac95-4e3b055b7207 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.450541] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0236f6-5712-460d-a23b-ab03a315880a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.464391] env[61648]: ERROR nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. 
[ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Traceback (most recent call last): [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] yield resources [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.driver.spawn(context, instance, image_meta, [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] vm_ref = self.build_virtual_machine(instance, [ 865.464391] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] vif_infos = vmwarevif.get_vif_info(self._session, [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] for vif in network_info: [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return self._sync_wrapper(fn, *args, **kwargs) [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.wait() [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self[:] = self._gt.wait() [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return self._exit_event.wait() [ 865.465036] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 865.465036] env[61648]: ERROR 
nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] current.throw(*self._exc) [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] result = function(*args, **kwargs) [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return func(*args, **kwargs) [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise e [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] nwinfo = self.network_api.allocate_for_instance( [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] created_port_ids = self._update_ports_for_instance( [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] with excutils.save_and_reraise_exception(): [ 865.465625] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.force_reraise() [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise self.value [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] updated_port = self._update_port( [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] _ensure_no_port_binding_failure(port) [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise exception.PortBindingFailed(port_id=port['id']) [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. [ 865.466254] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] [ 865.466254] env[61648]: INFO nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Terminating instance [ 865.467591] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.467591] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquired lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.467799] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.720867] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.775258] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.854771] env[61648]: DEBUG nova.network.neutron [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.885102] env[61648]: DEBUG nova.network.neutron [-] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.947197] env[61648]: DEBUG nova.network.neutron [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.965156] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edf3cbc-1ee2-485c-8851-a62eb7943eb3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.973783] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37829c7-cbc0-4235-98a6-c8d209525b64 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.004959] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.007062] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda97b5f-ac52-49c0-a5b1-658138a5a85f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.014358] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f0c678-d452-4cc1-8fbf-68fcbfb6ef7f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.028047] env[61648]: DEBUG nova.compute.provider_tree [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.077705] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.277898] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Releasing lock "refresh_cache-db35b417-bcdb-4380-927a-f755e6421624" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.278206] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Virt driver does not provide unplug_vifs method, so it is not possible 
determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 866.278609] env[61648]: DEBUG nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 866.278609] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.303126] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.387632] env[61648]: INFO nova.compute.manager [-] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Took 1.02 seconds to deallocate network for instance. [ 866.390166] env[61648]: DEBUG nova.compute.claims [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 866.390474] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.453353] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] Releasing lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.453632] env[61648]: DEBUG nova.compute.manager [req-b8e6607b-a2d4-412a-8924-7526bb45376d req-46d66bcc-5918-4795-9189-91cabfaee098 service nova] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Received event network-vif-deleted-a325a575-6070-44f4-b327-2ea5459bfea9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.531124] env[61648]: DEBUG nova.scheduler.client.report [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
866.580354] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Releasing lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.580772] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 866.580994] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.581287] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75d51e0e-c232-47cb-ad09-7d497f276824 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.590783] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fd2370-8808-42f4-b8b2-939c164da056 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.612184] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a could not be found. [ 866.612390] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.612572] env[61648]: INFO nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 866.612798] env[61648]: DEBUG oslo.service.loopingcall [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.613016] env[61648]: DEBUG nova.compute.manager [-] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 866.613116] env[61648]: DEBUG nova.network.neutron [-] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.628687] env[61648]: DEBUG nova.network.neutron [-] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.806405] env[61648]: DEBUG nova.network.neutron [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.833658] env[61648]: DEBUG nova.compute.manager [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Received event network-changed-a9db426e-3abb-46bd-864b-92ed5e4b6f9d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.833790] env[61648]: DEBUG nova.compute.manager [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Refreshing instance network info cache due to event network-changed-a9db426e-3abb-46bd-864b-92ed5e4b6f9d. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.834013] env[61648]: DEBUG oslo_concurrency.lockutils [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] Acquiring lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.834158] env[61648]: DEBUG oslo_concurrency.lockutils [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] Acquired lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.834327] env[61648]: DEBUG nova.network.neutron [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Refreshing network info cache for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.035990] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.840s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.036651] env[61648]: ERROR nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. 
[ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Traceback (most recent call last): [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.driver.spawn(context, instance, image_meta, [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] vm_ref = self.build_virtual_machine(instance, [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] vif_infos = vmwarevif.get_vif_info(self._session, [ 867.036651] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] for vif in network_info: [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self._sync_wrapper(fn, *args, **kwargs) [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.wait() [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self[:] = self._gt.wait() [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self._exit_event.wait() [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] result = hub.switch() [ 867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
867.037081] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return self.greenlet.switch() [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] result = function(*args, **kwargs) [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] return func(*args, **kwargs) [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise e [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] nwinfo = self.network_api.allocate_for_instance( [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] created_port_ids = self._update_ports_for_instance( [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] with excutils.save_and_reraise_exception(): [ 867.037482] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] self.force_reraise() [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise self.value [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] updated_port = self._update_port( [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] _ensure_no_port_binding_failure(port) [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] raise exception.PortBindingFailed(port_id=port['id']) [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] nova.exception.PortBindingFailed: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. [ 867.037874] env[61648]: ERROR nova.compute.manager [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] [ 867.038256] env[61648]: DEBUG nova.compute.utils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 867.038883] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.185s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.038883] env[61648]: DEBUG nova.objects.instance [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61648) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 867.041459] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Build of instance aa8fb674-60e3-431c-b8c3-9cc548965e18 was re-scheduled: Binding failed for port cab8fc31-5619-4956-a775-42241810c27c, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 867.041886] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 867.042118] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.042267] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.042440] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.130778] env[61648]: DEBUG nova.network.neutron [-] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.309335] env[61648]: INFO nova.compute.manager [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] [instance: db35b417-bcdb-4380-927a-f755e6421624] Took 1.03 seconds to deallocate network for instance. [ 867.352282] env[61648]: DEBUG nova.network.neutron [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.428594] env[61648]: DEBUG nova.network.neutron [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.562548] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.633488] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.634752] env[61648]: INFO nova.compute.manager [-] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Took 1.02 seconds to deallocate network for instance. [ 867.637656] env[61648]: DEBUG nova.compute.claims [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 867.637837] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.932064] env[61648]: DEBUG oslo_concurrency.lockutils [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] Releasing lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.932064] env[61648]: DEBUG nova.compute.manager [req-48aafac4-543f-411c-bd85-263bc9c9aa48 req-9a6d371f-a433-4f6c-a36a-a763aed9c715 service nova] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Received event network-vif-deleted-a9db426e-3abb-46bd-864b-92ed5e4b6f9d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.051920] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e56c7052-9e74-44e3-968e-9c734ba6ab97 tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.053097] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.741s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.138218] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-aa8fb674-60e3-431c-b8c3-9cc548965e18" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.138588] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 868.138907] env[61648]: DEBUG nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 868.139194] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 868.155819] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.341207] env[61648]: INFO nova.scheduler.client.report [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Deleted allocations for instance db35b417-bcdb-4380-927a-f755e6421624 [ 868.659163] env[61648]: DEBUG nova.network.neutron [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.849061] env[61648]: DEBUG oslo_concurrency.lockutils [None req-695b5172-df45-4bf7-94b9-13dd4a31c6c2 tempest-ListImageFiltersTestJSON-63147329 tempest-ListImageFiltersTestJSON-63147329-project-member] Lock "db35b417-bcdb-4380-927a-f755e6421624" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 110.514s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.086061] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 32a2c7ce-2980-4eac-ad52-b8d5d67d669b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 869.086270] env[61648]: WARNING nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance acc5b6cb-16ee-4756-9088-fa094eb83daa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 869.163227] env[61648]: INFO nova.compute.manager [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: aa8fb674-60e3-431c-b8c3-9cc548965e18] Took 1.02 seconds to deallocate network for instance. 
[ 869.351949] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 869.589733] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance aa8fb674-60e3-431c-b8c3-9cc548965e18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.590011] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 869.590106] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b3ae23d8-e28e-460c-b9a3-4744f81f39ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 869.590191] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 34dc9640-9b39-4e3b-b8ca-7a29ab760992 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 869.590265] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 869.875213] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.097364] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 03f59be3-f1bb-4e3a-96ea-7b39de515397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.196405] env[61648]: INFO nova.scheduler.client.report [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Deleted allocations for instance aa8fb674-60e3-431c-b8c3-9cc548965e18 [ 870.600950] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance d37aad4c-f4e9-40ab-a250-5dd3924f305c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.709505] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c3d3a93f-2c99-46aa-894b-12bc69dd2b53 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "aa8fb674-60e3-431c-b8c3-9cc548965e18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.620s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.105218] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance c98567aa-d978-4b4d-9e01-25ab70246205 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 871.215024] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 871.552898] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "6c2f92c0-1346-4c9a-aa96-168b0987bb2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.553376] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "6c2f92c0-1346-4c9a-aa96-168b0987bb2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.611848] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance e4adb624-e900-4838-a5c5-2cd0d488f458 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 871.732123] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.117366] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance b1cd355c-ea96-4ff9-aa40-6605c8b73e3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 872.621032] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 3002571b-4800-48a9-84c1-68f6d3e0cc70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 873.126849] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 873.126849] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 873.126849] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 873.311019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b2a5f3-715a-43e7-a24f-5822e56a00ca {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.316067] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5681b-fe4b-4542-ad89-2c29a9e04629 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.346536] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a9726c-de51-43e0-a9c3-bcec20c0ca35 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.353533] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e17583-4ebb-42ca-84c9-4d4493ac807e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.366343] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.872234] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.377312] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 874.377312] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.324s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.377489] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.118s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.379189] env[61648]: INFO nova.compute.claims [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.542429] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9ebdc6-8e0e-4e27-bd40-c7dd07a51410 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.549569] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f724619b-e009-45bc-899a-aea983d7dddb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.578836] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cd7a20-ff6b-4822-8798-b62c79843961 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.585306] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc3af56-4b72-4f41-bb09-7669bde7f56e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.597751] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.101451] env[61648]: DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.606817] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.607335] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] 
[instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 876.610154] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.553s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.611609] env[61648]: INFO nova.compute.claims [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.116587] env[61648]: DEBUG nova.compute.utils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.119864] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 877.120088] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 877.172398] env[61648]: DEBUG nova.policy [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75788746b2214f2e8c1a8884c89ddb9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd94e7e89f424d34920f0fa92acf3226', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.456122] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Successfully created port: 1965ab3f-57b4-41f5-bea6-409d80b49b2a {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.621306] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 877.795091] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e5dad0-728c-48de-abb9-cb1ca591d57b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.806989] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e709c55f-6fe0-429e-a883-cf475a976ab3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.836643] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c75105f-414f-45a5-a1d8-9f1e0ca2d09a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.843421] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a76ddc-aa2f-4fa6-a525-554a86c8e59b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.856118] env[61648]: DEBUG nova.compute.provider_tree [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.155751] env[61648]: DEBUG nova.compute.manager [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Received event network-changed-1965ab3f-57b4-41f5-bea6-409d80b49b2a {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 878.155980] env[61648]: DEBUG nova.compute.manager [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Refreshing instance network info cache due to event network-changed-1965ab3f-57b4-41f5-bea6-409d80b49b2a. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 878.156210] env[61648]: DEBUG oslo_concurrency.lockutils [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] Acquiring lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.156358] env[61648]: DEBUG oslo_concurrency.lockutils [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] Acquired lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.156516] env[61648]: DEBUG nova.network.neutron [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Refreshing network info cache for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.353447] env[61648]: ERROR nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 878.353447] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.353447] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.353447] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.353447] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.353447] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.353447] env[61648]: ERROR nova.compute.manager raise self.value [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.353447] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 878.353447] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.353447] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 878.353920] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.353920] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 878.353920] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 878.353920] env[61648]: ERROR nova.compute.manager [ 878.353920] env[61648]: Traceback (most recent call last): [ 878.353920] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 878.353920] env[61648]: listener.cb(fileno) [ 878.353920] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.353920] env[61648]: result = function(*args, **kwargs) [ 878.353920] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 878.353920] env[61648]: return func(*args, **kwargs) [ 878.353920] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.353920] env[61648]: raise e [ 878.353920] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.353920] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 878.353920] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.353920] env[61648]: created_port_ids = self._update_ports_for_instance( [ 878.353920] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.353920] env[61648]: with excutils.save_and_reraise_exception(): [ 878.353920] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.353920] env[61648]: self.force_reraise() [ 878.353920] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.353920] env[61648]: raise self.value [ 878.353920] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.353920] env[61648]: updated_port = self._update_port( [ 878.353920] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.353920] env[61648]: _ensure_no_port_binding_failure(port) [ 878.353920] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.353920] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 878.354714] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. 
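The traceback above bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed: after Neutron updates the port, Nova inspects the returned binding:vif_type and treats the sentinel value 'binding_failed' as fatal for the build. The following is a simplified, self-contained sketch of that check, not the actual helper from nova/network/neutron.py.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron marks a port that none of its mechanism drivers could bind with
    # binding:vif_type = 'binding_failed'; the compute manager turns that into
    # an exception so the build is aborted and the resource claim rolled back.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# A successfully bound port passes through untouched:
ensure_no_port_binding_failure(
    {"id": "1965ab3f-57b4-41f5-bea6-409d80b49b2a", "binding:vif_type": "ovs"})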
[ 878.354714] env[61648]: Removing descriptor: 19 [ 878.358690] env[61648]: DEBUG nova.scheduler.client.report [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.638091] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 878.663225] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.663487] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.663627] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.663796] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.663939] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.664096] env[61648]: DEBUG nova.virt.hardware [None 
req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.664301] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.664520] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.664630] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.664770] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.664937] env[61648]: DEBUG nova.virt.hardware [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.665841] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4209e5b6-7ae0-4ab5-9582-2ee2e73dee98 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.674274] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824b89e5-bc52-446e-91e8-4b5ad887df34 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.688057] env[61648]: ERROR nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. 
[ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Traceback (most recent call last): [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] yield resources [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.driver.spawn(context, instance, image_meta, [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] vm_ref = self.build_virtual_machine(instance, [ 878.688057] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] vif_infos = vmwarevif.get_vif_info(self._session, [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] for vif in network_info: [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return self._sync_wrapper(fn, *args, **kwargs) [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.wait() [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self[:] = self._gt.wait() [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return self._exit_event.wait() [ 878.688415] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 878.688415] env[61648]: ERROR 
nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] current.throw(*self._exc) [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] result = function(*args, **kwargs) [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return func(*args, **kwargs) [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise e [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] nwinfo = self.network_api.allocate_for_instance( [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] created_port_ids = self._update_ports_for_instance( [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] with excutils.save_and_reraise_exception(): [ 878.688763] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.force_reraise() [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise self.value [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] updated_port = self._update_port( [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] _ensure_no_port_binding_failure(port) [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise exception.PortBindingFailed(port_id=port['id']) [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 878.689106] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] [ 878.689106] env[61648]: INFO nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Terminating instance [ 878.691251] env[61648]: DEBUG nova.network.neutron [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.693233] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.760861] env[61648]: DEBUG nova.network.neutron [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.865405] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.865903] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.868584] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.310s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.868777] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.871074] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.848s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.876020] env[61648]: INFO nova.compute.claims [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.928298] env[61648]: INFO nova.scheduler.client.report [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleted allocations for instance acc5b6cb-16ee-4756-9088-fa094eb83daa [ 879.263022] env[61648]: DEBUG oslo_concurrency.lockutils [req-71791b46-efe8-4fd3-9eb0-a6a5017dff65 req-81e86074-7a0f-4eef-ba65-31d2e8f23cb2 service nova] Releasing lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.263711] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.263916] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.382388] env[61648]: DEBUG nova.compute.utils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.384159] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] 
Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 879.384327] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.439021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b55600e0-38bd-4707-9562-13ee923f0a3b tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "acc5b6cb-16ee-4756-9088-fa094eb83daa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.612s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.467355] env[61648]: DEBUG nova.policy [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '876b46a0e3c542eb9e267f0e0615123c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ccacb6024de431092dd0610c5ca38cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.756258] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.756564] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.756758] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.756947] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.757659] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.761909] env[61648]: INFO nova.compute.manager [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Terminating instance [ 879.764699] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "refresh_cache-32a2c7ce-2980-4eac-ad52-b8d5d67d669b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.764857] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquired lock "refresh_cache-32a2c7ce-2980-4eac-ad52-b8d5d67d669b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.765130] env[61648]: DEBUG nova.network.neutron [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.782129] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Successfully created port: d337f60c-a4b9-4593-97a0-7614a0295771 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.784652] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.857931] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.888227] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 880.048257] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6f8de7-1ff7-422a-a5d8-4d4999797ec7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.055759] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f259c8-4227-46f5-a965-c76d36e8b5dd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.086641] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0ba406-ed76-497e-a479-ae85f267f834 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.094048] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271b94ad-89df-47b6-8c32-19e6651b5775 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.107178] env[61648]: DEBUG nova.compute.provider_tree [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.192132] env[61648]: DEBUG nova.compute.manager [req-90a311bd-f3b4-4a26-a453-563c51f0676a req-380a24bc-125c-4d7e-95fd-75551e1ca87c service nova] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Received event network-vif-deleted-1965ab3f-57b4-41f5-bea6-409d80b49b2a {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.283259] env[61648]: DEBUG nova.network.neutron [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.345263] env[61648]: DEBUG nova.network.neutron [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.360109] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.360709] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 880.363019] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.363019] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38b861a3-34f3-4baf-b1b9-6979f0606b6f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.371837] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6db4393-de48-4fd6-a7ee-a1fdd317eccd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.400594] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03f59be3-f1bb-4e3a-96ea-7b39de515397 could not be found. [ 880.401044] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.401469] env[61648]: INFO nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Took 0.04 seconds to destroy the instance on the hypervisor. [ 880.401883] env[61648]: DEBUG oslo.service.loopingcall [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.402680] env[61648]: DEBUG nova.compute.manager [-] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.402942] env[61648]: DEBUG nova.network.neutron [-] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.420703] env[61648]: DEBUG nova.network.neutron [-] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.613021] env[61648]: DEBUG nova.scheduler.client.report [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.711478] env[61648]: ERROR nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. [ 880.711478] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 880.711478] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 880.711478] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 880.711478] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.711478] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.711478] env[61648]: ERROR nova.compute.manager raise self.value [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 880.711478] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 880.711478] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 880.711478] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 880.711953] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 880.711953] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 880.711953] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. 
[ 880.711953] env[61648]: ERROR nova.compute.manager [ 880.711953] env[61648]: Traceback (most recent call last): [ 880.711953] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 880.711953] env[61648]: listener.cb(fileno) [ 880.711953] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 880.711953] env[61648]: result = function(*args, **kwargs) [ 880.711953] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 880.711953] env[61648]: return func(*args, **kwargs) [ 880.711953] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 880.711953] env[61648]: raise e [ 880.711953] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 880.711953] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 880.711953] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 880.711953] env[61648]: created_port_ids = self._update_ports_for_instance( [ 880.711953] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 880.711953] env[61648]: with excutils.save_and_reraise_exception(): [ 880.711953] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.711953] env[61648]: self.force_reraise() [ 880.711953] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.711953] env[61648]: raise self.value [ 880.711953] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 880.711953] env[61648]: updated_port = self._update_port( [ 880.711953] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 880.711953] env[61648]: _ensure_no_port_binding_failure(port) [ 880.711953] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 880.711953] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 880.712736] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. [ 880.712736] env[61648]: Removing descriptor: 19 [ 880.849064] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Releasing lock "refresh_cache-32a2c7ce-2980-4eac-ad52-b8d5d67d669b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.849640] env[61648]: DEBUG nova.compute.manager [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 880.849923] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.850865] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff8765b-9fa7-432d-83d7-4354b4d17a06 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.858885] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.859239] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-370ae91b-3b1b-4749-9adc-9c6ea5d83e0e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.864969] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 880.864969] env[61648]: value = "task-1336700" [ 880.864969] env[61648]: _type = "Task" [ 880.864969] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.873315] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336700, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.904359] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.924194] env[61648]: DEBUG nova.network.neutron [-] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.938795] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.939285] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.939603] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.939897] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.940166] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.940405] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.940689] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.940924] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 
tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.941178] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.941433] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.941761] env[61648]: DEBUG nova.virt.hardware [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.944636] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8dcca7-7401-4caa-a066-fa30c2f3f675 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.953530] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bccebb-5526-476d-9249-e5b50590cad4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.974871] env[61648]: ERROR nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. 
[ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Traceback (most recent call last): [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] yield resources [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.driver.spawn(context, instance, image_meta, [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] vm_ref = self.build_virtual_machine(instance, [ 880.974871] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] vif_infos = vmwarevif.get_vif_info(self._session, [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] for vif in network_info: [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return self._sync_wrapper(fn, *args, **kwargs) [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.wait() [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self[:] = self._gt.wait() [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return self._exit_event.wait() [ 880.975319] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 880.975319] env[61648]: ERROR 
nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] current.throw(*self._exc) [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] result = function(*args, **kwargs) [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return func(*args, **kwargs) [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise e [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] nwinfo = self.network_api.allocate_for_instance( [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] created_port_ids = self._update_ports_for_instance( [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] with excutils.save_and_reraise_exception(): [ 880.975681] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.force_reraise() [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise self.value [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] updated_port = self._update_port( [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] _ensure_no_port_binding_failure(port) [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise exception.PortBindingFailed(port_id=port['id']) [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. [ 880.976058] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] [ 880.976058] env[61648]: INFO nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Terminating instance [ 880.978135] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.978383] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.978629] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.116074] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.116595] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 881.123021] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.241s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.375424] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336700, 'name': PowerOffVM_Task, 'duration_secs': 0.108636} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.375706] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.375911] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.376220] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-399b441f-8f7f-4c52-af57-0518487f6efd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.402265] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.402468] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.402800] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleting the datastore file [datastore2] 32a2c7ce-2980-4eac-ad52-b8d5d67d669b {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.403597] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30d0a0f2-087f-4d9d-9f3a-2adcfa5699a7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.409677] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for the task: (returnval){ [ 881.409677] env[61648]: value = "task-1336702" [ 881.409677] env[61648]: _type = "Task" [ 881.409677] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.419577] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336702, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.428185] env[61648]: INFO nova.compute.manager [-] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Took 1.03 seconds to deallocate network for instance. 
[ 881.432129] env[61648]: DEBUG nova.compute.claims [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 881.432129] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.496685] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.597728] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.622595] env[61648]: DEBUG nova.compute.utils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.624389] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.624389] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.670132] env[61648]: DEBUG nova.policy [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.816217] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84bc2b3-f30c-4b75-979b-790c1dba879b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.823554] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff333dd0-b763-4bf1-bb8b-0b863748021e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.852064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4c1b28-efc9-41d9-b575-a7dfc6d37e06 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.858854] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b450f-25a6-43ee-9e84-e3d091a6a4be {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.872834] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.919701] env[61648]: DEBUG oslo_vmware.api [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Task: {'id': task-1336702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090259} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.919964] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 881.920191] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 881.920375] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 881.920547] env[61648]: INFO nova.compute.manager [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Took 1.07 seconds to destroy the instance on the hypervisor. [ 881.920786] env[61648]: DEBUG oslo.service.loopingcall [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.920973] env[61648]: DEBUG nova.compute.manager [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 881.921079] env[61648]: DEBUG nova.network.neutron [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 881.952188] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Successfully created port: eec74a3c-8cd3-4070-8d1c-1bc8628319eb {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.954651] env[61648]: DEBUG nova.network.neutron [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.100614] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.101066] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 882.101264] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.102078] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2c0f9ff-561b-4e36-ae6c-4f1f7d8004dc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.110977] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798f1455-a029-46f1-84d7-5983f3f506f4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.131537] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 882.134450] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d37aad4c-f4e9-40ab-a250-5dd3924f305c could not be found. [ 882.134646] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.134826] env[61648]: INFO nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 882.135077] env[61648]: DEBUG oslo.service.loopingcall [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.135502] env[61648]: DEBUG nova.compute.manager [-] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 882.135601] env[61648]: DEBUG nova.network.neutron [-] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 882.150640] env[61648]: DEBUG nova.network.neutron [-] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.219581] env[61648]: DEBUG nova.compute.manager [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Received event network-changed-d337f60c-a4b9-4593-97a0-7614a0295771 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.219581] env[61648]: DEBUG nova.compute.manager [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Refreshing instance network info cache due to event network-changed-d337f60c-a4b9-4593-97a0-7614a0295771. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.219581] env[61648]: DEBUG oslo_concurrency.lockutils [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] Acquiring lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.219581] env[61648]: DEBUG oslo_concurrency.lockutils [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] Acquired lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.219581] env[61648]: DEBUG nova.network.neutron [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Refreshing network info cache for port d337f60c-a4b9-4593-97a0-7614a0295771 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.392610] env[61648]: ERROR nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [req-dd0ff71a-545b-4ac6-937b-f8b4936e11ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd0ff71a-545b-4ac6-937b-f8b4936e11ad"}]}: nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. [ 882.408968] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 882.421921] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 882.422150] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.436780] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 882.459478] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 882.461652] env[61648]: DEBUG nova.network.neutron [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.655986] env[61648]: DEBUG nova.network.neutron [-] 
[instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.683862] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ec97ca-8406-4122-9884-58f687f41f7d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.692428] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bd6a81-31be-44b7-a9af-7b4dae8c3786 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.724950] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782c8be8-d1a3-4d0a-a2dd-03f4b6e9abc6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.732533] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0752817-e016-4245-ba4a-29941c2ada36 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.746139] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.752129] env[61648]: DEBUG nova.network.neutron [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.857442] env[61648]: DEBUG nova.network.neutron [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.911226] env[61648]: ERROR nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. 
[ 882.911226] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 882.911226] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 882.911226] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 882.911226] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 882.911226] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 882.911226] env[61648]: ERROR nova.compute.manager raise self.value [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 882.911226] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 882.911226] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 882.911226] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 882.912040] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 882.912040] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 882.912040] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. 
[ 882.912040] env[61648]: ERROR nova.compute.manager [ 882.912040] env[61648]: Traceback (most recent call last): [ 882.912040] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 882.912040] env[61648]: listener.cb(fileno) [ 882.912040] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 882.912040] env[61648]: result = function(*args, **kwargs) [ 882.912040] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 882.912040] env[61648]: return func(*args, **kwargs) [ 882.912040] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 882.912040] env[61648]: raise e [ 882.912040] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 882.912040] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 882.912040] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 882.912040] env[61648]: created_port_ids = self._update_ports_for_instance( [ 882.912040] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 882.912040] env[61648]: with excutils.save_and_reraise_exception(): [ 882.912040] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 882.912040] env[61648]: self.force_reraise() [ 882.912040] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 882.912040] env[61648]: raise self.value [ 882.912040] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 882.912040] env[61648]: updated_port = self._update_port( [ 882.912040] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 882.912040] env[61648]: _ensure_no_port_binding_failure(port) [ 882.912040] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 882.912040] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 882.913568] env[61648]: nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. [ 882.913568] env[61648]: Removing descriptor: 19 [ 882.964948] env[61648]: INFO nova.compute.manager [-] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Took 1.04 seconds to deallocate network for instance. [ 883.143557] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 883.158338] env[61648]: INFO nova.compute.manager [-] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Took 1.02 seconds to deallocate network for instance. 
[ 883.168119] env[61648]: DEBUG nova.compute.claims [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 883.168352] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.170723] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.170942] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.171108] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.171287] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.171912] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.171912] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.171912] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.172105] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.172163] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.172312] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.172503] env[61648]: DEBUG nova.virt.hardware [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.173358] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c752d1b0-dfc3-4dde-a61a-7d5ddcd25789 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.187309] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca2f652-5d8b-416d-844c-35a4c2fe627a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.201555] env[61648]: ERROR nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. 
[ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Traceback (most recent call last): [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] yield resources [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self.driver.spawn(context, instance, image_meta, [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] vm_ref = self.build_virtual_machine(instance, [ 883.201555] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] vif_infos = vmwarevif.get_vif_info(self._session, [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] for vif in network_info: [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return self._sync_wrapper(fn, *args, **kwargs) [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self.wait() [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self[:] = self._gt.wait() [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return self._exit_event.wait() [ 883.202142] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 883.202142] env[61648]: ERROR 
nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] current.throw(*self._exc) [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] result = function(*args, **kwargs) [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return func(*args, **kwargs) [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise e [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] nwinfo = self.network_api.allocate_for_instance( [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] created_port_ids = self._update_ports_for_instance( [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] with excutils.save_and_reraise_exception(): [ 883.202698] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self.force_reraise() [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise self.value [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] updated_port = self._update_port( [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] _ensure_no_port_binding_failure(port) [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise exception.PortBindingFailed(port_id=port['id']) [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. [ 883.203894] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] [ 883.203894] env[61648]: INFO nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Terminating instance [ 883.204521] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.204521] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.204521] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.275452] env[61648]: DEBUG nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 883.275718] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 101 to 102 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 883.276121] env[61648]: DEBUG nova.compute.provider_tree [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.360575] env[61648]: DEBUG oslo_concurrency.lockutils [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] Releasing lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.361255] env[61648]: DEBUG nova.compute.manager [req-36560302-9cdb-4d47-a78c-8542ae64c6b3 req-261b3e59-d28a-407d-b3e3-385dfae2196d service nova] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Received event network-vif-deleted-d337f60c-a4b9-4593-97a0-7614a0295771 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.471241] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.720069] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.781842] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.661s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.782469] env[61648]: ERROR nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
[ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Traceback (most recent call last): [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.driver.spawn(context, instance, image_meta, [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] vm_ref = self.build_virtual_machine(instance, [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 883.782469] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] for vif in network_info: [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return self._sync_wrapper(fn, *args, **kwargs) [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.wait() [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self[:] = self._gt.wait() [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return self._exit_event.wait() [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] current.throw(*self._exc) [ 883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
883.782800] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] result = function(*args, **kwargs) [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] return func(*args, **kwargs) [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise e [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] nwinfo = self.network_api.allocate_for_instance( [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] created_port_ids = self._update_ports_for_instance( [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] with excutils.save_and_reraise_exception(): [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] self.force_reraise() [ 883.783130] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise self.value [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] updated_port = self._update_port( [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] _ensure_no_port_binding_failure(port) [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] raise exception.PortBindingFailed(port_id=port['id']) [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] nova.exception.PortBindingFailed: Binding failed for 
port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. [ 883.783447] env[61648]: ERROR nova.compute.manager [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] [ 883.783447] env[61648]: DEBUG nova.compute.utils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 883.784630] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.611s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.786062] env[61648]: INFO nova.compute.claims [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.792019] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Build of instance 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb was re-scheduled: Binding failed for port f89097cb-f6da-490f-bf27-bf292cbddd33, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 883.792019] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 883.792019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.792019] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.792340] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.811122] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.245319] env[61648]: DEBUG nova.compute.manager [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Received event network-changed-eec74a3c-8cd3-4070-8d1c-1bc8628319eb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.245580] env[61648]: DEBUG nova.compute.manager [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Refreshing instance network info cache due to event network-changed-eec74a3c-8cd3-4070-8d1c-1bc8628319eb. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.245661] env[61648]: DEBUG oslo_concurrency.lockutils [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] Acquiring lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.309253] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.313831] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.314239] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.314418] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.315068] env[61648]: DEBUG oslo_concurrency.lockutils [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] Acquired lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.315068] env[61648]: DEBUG nova.network.neutron [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Refreshing network info cache for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.315809] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7669302c-5dd3-44ed-b7d9-66fea7efbddb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.325205] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741ec4aa-198d-4818-b3c2-47b189bd4f8c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.347279] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c98567aa-d978-4b4d-9e01-25ab70246205 could not be found. [ 884.347279] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.347414] env[61648]: INFO nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 884.347644] env[61648]: DEBUG oslo.service.loopingcall [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.347853] env[61648]: DEBUG nova.compute.manager [-] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.347939] env[61648]: DEBUG nova.network.neutron [-] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.370299] env[61648]: DEBUG nova.network.neutron [-] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.406369] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.876299] env[61648]: DEBUG nova.network.neutron [-] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.909975] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.910129] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 884.910314] env[61648]: DEBUG nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.910478] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.918838] env[61648]: DEBUG nova.network.neutron [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.934475] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.990371] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5daaf31-9a7c-4329-88e5-e7ec01093fd9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.998243] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c8b464-56dc-4429-9d9f-80c1b3ee0020 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.029233] env[61648]: DEBUG nova.network.neutron [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.030651] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d43319-fb82-493c-9a65-f46ac868089b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.037729] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b34d016-321b-4a8e-818e-8ffc00c187da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.051332] env[61648]: DEBUG nova.compute.provider_tree [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.378840] env[61648]: INFO nova.compute.manager [-] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Took 1.03 seconds to deallocate network for instance. 
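The "Acquiring lock" / "acquired ... waited" / "'released' ... held" lines throughout this section are emitted by oslo_concurrency.lockutils around an in-process lock. A minimal sketch of that pattern, with the lock name copied from the log and a placeholder body:

    # Sketch of the lockutils pattern behind the waited/held timings above.
    from oslo_concurrency import lockutils

    def claim_bookkeeping():
        with lockutils.lock("compute_resources"):
            # Critical section: resource-claim bookkeeping happens here; the
            # waited/held durations in the log bracket this acquire/release.
            pass

Long "waited" values (e.g. 22.611s earlier in this section) indicate contention among callers serializing on the same lock name.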
[ 885.381242] env[61648]: DEBUG nova.compute.claims [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 885.381508] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.437218] env[61648]: DEBUG nova.network.neutron [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.534155] env[61648]: DEBUG oslo_concurrency.lockutils [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] Releasing lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.534425] env[61648]: DEBUG nova.compute.manager [req-0dee8226-1092-4ccb-b403-fb8ec01db466 req-dcadcabd-3e4a-41b6-ad34-619c968d152c service nova] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Received event network-vif-deleted-eec74a3c-8cd3-4070-8d1c-1bc8628319eb {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.554634] env[61648]: DEBUG nova.scheduler.client.report [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.939486] env[61648]: INFO nova.compute.manager [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb] Took 1.03 seconds to deallocate network for instance. 
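A worked reading of the inventory record logged for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Treating effective schedulable capacity per resource class as (total - reserved) * allocation_ratio is the usual Placement interpretation; that formula is an assumption here, the numbers are from the log.

    # Effective capacity from the logged inventory (formula assumed).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0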
[ 886.063022] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.275s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.063022] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 886.063998] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.107s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.570094] env[61648]: DEBUG nova.compute.utils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.579386] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Not allocating networking since 'none' was specified. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 886.740339] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a5aa53-c95e-4608-b692-42b373df6c3d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.749377] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4116f946-1874-4a34-b553-0899be8a2c16 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.787432] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6918b5ae-da69-4e4b-a664-c5e4a4c8f10a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.795178] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a4d85f-3d6c-4162-b8fc-e3eb26d96150 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.808945] env[61648]: DEBUG nova.compute.provider_tree [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.969093] env[61648]: INFO nova.scheduler.client.report [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance 54f1dda7-fdfb-4322-897b-3a1e3ffed7fb [ 887.078608] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 887.314554] env[61648]: DEBUG nova.scheduler.client.report [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.478892] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f3083a8-b4ad-479a-bf52-dc4ad2e36b14 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "54f1dda7-fdfb-4322-897b-3a1e3ffed7fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.222s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.819789] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.756s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.820462] env[61648]: ERROR nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. 
[ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Traceback (most recent call last): [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.driver.spawn(context, instance, image_meta, [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] vm_ref = self.build_virtual_machine(instance, [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] vif_infos = vmwarevif.get_vif_info(self._session, [ 887.820462] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] for vif in network_info: [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return self._sync_wrapper(fn, *args, **kwargs) [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.wait() [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self[:] = self._gt.wait() [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return self._exit_event.wait() [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] current.throw(*self._exc) [ 887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
887.820777] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] result = function(*args, **kwargs) [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] return func(*args, **kwargs) [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise e [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] nwinfo = self.network_api.allocate_for_instance( [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] created_port_ids = self._update_ports_for_instance( [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] with excutils.save_and_reraise_exception(): [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] self.force_reraise() [ 887.821109] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise self.value [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] updated_port = self._update_port( [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] _ensure_no_port_binding_failure(port) [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] raise exception.PortBindingFailed(port_id=port['id']) [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] nova.exception.PortBindingFailed: Binding failed for 
port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. [ 887.821429] env[61648]: ERROR nova.compute.manager [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] [ 887.821429] env[61648]: DEBUG nova.compute.utils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 887.822659] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.432s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.826202] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Build of instance b3ae23d8-e28e-460c-b9a3-4744f81f39ec was re-scheduled: Binding failed for port ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 887.826630] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 887.826980] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquiring lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.827173] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Acquired lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.827342] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.981518] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 888.085922] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 888.117198] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.117432] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.117581] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.117766] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.117905] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.118058] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.118273] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.118428] env[61648]: 
DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.118587] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.118741] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.120071] env[61648]: DEBUG nova.virt.hardware [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.120071] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95ac503-4be5-4ccd-b022-3edd84dfa759 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.127573] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb6a72f-028c-4c8e-a77c-ec6b41a0843f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.141992] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance VIF info [] {{(pid=61648) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.147591] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Creating folder: Project (a3ce8dd2ab684bdd858a91b591727a98). Parent ref: group-v285225. {{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.148195] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01e4774a-1a6d-4ea0-aec6-ed54360b1775 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.158319] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Created folder: Project (a3ce8dd2ab684bdd858a91b591727a98) in parent group-v285225. [ 888.162022] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Creating folder: Instances. Parent ref: group-v285251. 
{{(pid=61648) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.162022] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8c291de-ef09-4122-8b9a-98f62743beaa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.170117] env[61648]: INFO nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Created folder: Instances in parent group-v285251. [ 888.170117] env[61648]: DEBUG oslo.service.loopingcall [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.170117] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Creating VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.170117] env[61648]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2020357c-5ae7-441f-b29d-79a5875f379a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.184206] env[61648]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.184206] env[61648]: value = "task-1336705" [ 888.184206] env[61648]: _type = "Task" [ 888.184206] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.191455] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336705, 'name': CreateVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.316661] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "3c33a19e-211a-43f6-ae79-596f1c070a76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.317227] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "3c33a19e-211a-43f6-ae79-596f1c070a76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.353018] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.449838] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.504622] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04951bba-520b-4073-9ee2-32c239989cea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.508139] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.513044] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf79b8b8-2954-44e5-a855-2dcdb2faae0b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.542329] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807e85b9-eeb9-4687-a45a-6a04af3d189d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.549294] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d149b92-b8e7-4269-b85d-c48dfa26b37b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.562187] env[61648]: DEBUG nova.compute.provider_tree [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.695130] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336705, 'name': CreateVM_Task} progress is 99%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.952273] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Releasing lock "refresh_cache-b3ae23d8-e28e-460c-b9a3-4744f81f39ec" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.952700] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 888.952700] env[61648]: DEBUG nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 888.952931] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.968615] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.069050] env[61648]: DEBUG nova.scheduler.client.report [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.195158] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336705, 'name': CreateVM_Task} progress is 99%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.471541] env[61648]: DEBUG nova.network.neutron [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.575355] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.753s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.576016] env[61648]: ERROR nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. 
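Every PortBindingFailed traceback in this log bottoms out in the same check: after Neutron returns the updated port, nova inspects the binding result recorded on the port and raises when the binding failed, which is what produces the "Binding failed for port ..., please check neutron logs" messages above. A minimal, self-contained sketch of that check follows; it paraphrases the helper named in the traceback (nova/network/neutron.py, _ensure_no_port_binding_failure) rather than quoting it, and the 'binding_failed' constant and the simplified exception class are assumptions for illustration only:

    # Hedged sketch of the check behind the PortBindingFailed tracebacks above.
    # The constant and exception are simplified stand-ins, not nova's own classes.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron reports the binding outcome in the port's binding:vif_type
        # field; a value of 'binding_failed' means no mechanism driver could
        # bind the port on the target host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Usage: a port dict shaped like Neutron's response for a failed binding.
    try:
        ensure_no_port_binding_failure(
            {'id': 'ce7e1fc3-5337-4f86-ad1b-e4cb473bbd55',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)
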
[ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Traceback (most recent call last): [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.driver.spawn(context, instance, image_meta, [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self._vmops.spawn(context, instance, image_meta, injected_files, [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] vm_ref = self.build_virtual_machine(instance, [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] vif_infos = vmwarevif.get_vif_info(self._session, [ 889.576016] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] for vif in network_info: [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return self._sync_wrapper(fn, *args, **kwargs) [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.wait() [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self[:] = self._gt.wait() [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return self._exit_event.wait() [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] current.throw(*self._exc) [ 889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
889.576541] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] result = function(*args, **kwargs) [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] return func(*args, **kwargs) [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise e [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] nwinfo = self.network_api.allocate_for_instance( [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] created_port_ids = self._update_ports_for_instance( [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] with excutils.save_and_reraise_exception(): [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] self.force_reraise() [ 889.577071] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise self.value [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] updated_port = self._update_port( [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] _ensure_no_port_binding_failure(port) [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] raise exception.PortBindingFailed(port_id=port['id']) [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] nova.exception.PortBindingFailed: Binding failed for 
port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. [ 889.577617] env[61648]: ERROR nova.compute.manager [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] [ 889.577617] env[61648]: DEBUG nova.compute.utils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 889.578031] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.940s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.581089] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Build of instance 34dc9640-9b39-4e3b-b8ca-7a29ab760992 was re-scheduled: Binding failed for port a325a575-6070-44f4-b327-2ea5459bfea9, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 889.581523] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 889.581762] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquiring lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.581917] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Acquired lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.586198] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.695543] env[61648]: DEBUG oslo_vmware.api [-] Task: {'id': task-1336705, 'name': CreateVM_Task, 'duration_secs': 1.237921} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.695721] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Created VM on the ESX host {{(pid=61648) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.696178] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.696372] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.696713] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.696990] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9a696e2-d7c5-49ba-8297-1a0947e03be9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.701349] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 889.701349] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52cc3f35-7f26-18b2-8bda-98a35fa5e008" [ 889.701349] env[61648]: _type = "Task" [ 889.701349] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.711384] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52cc3f35-7f26-18b2-8bda-98a35fa5e008, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.975201] env[61648]: INFO nova.compute.manager [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] [instance: b3ae23d8-e28e-460c-b9a3-4744f81f39ec] Took 1.02 seconds to deallocate network for instance. [ 890.113331] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.184252] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.215378] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52cc3f35-7f26-18b2-8bda-98a35fa5e008, 'name': SearchDatastore_Task, 'duration_secs': 0.010106} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.215891] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.216257] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Processing image a3243eb3-32d0-4887-afc7-2030d2340206 {{(pid=61648) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.216620] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.216881] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.217294] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.218293] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-714bc32c-b255-4fcc-b234-4644049b2bf8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.226677] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61648) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.226993] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61648) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.230253] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f1da849-c771-425a-869b-4302c3ba02d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.236407] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 890.236407] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]52281b4b-5131-70cf-df82-7415bfa0439d" [ 890.236407] env[61648]: _type = "Task" [ 890.236407] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.244754] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52281b4b-5131-70cf-df82-7415bfa0439d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.293149] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283adc5d-3708-4a60-b716-6f7a4f381879 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.300098] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e5059e-483b-4152-b524-50186b658ab6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.332124] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c9039b-63c8-48db-a10a-a93ea88f54ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.340617] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fed2e34-cbac-4c34-a93a-1dfa0f14eafe {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.352923] env[61648]: DEBUG nova.compute.provider_tree [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.690676] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Releasing lock "refresh_cache-34dc9640-9b39-4e3b-b8ca-7a29ab760992" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.690676] env[61648]: DEBUG nova.compute.manager [None 
req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 890.690676] env[61648]: DEBUG nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.690676] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.705319] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.746759] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]52281b4b-5131-70cf-df82-7415bfa0439d, 'name': SearchDatastore_Task, 'duration_secs': 0.008623} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.747685] env[61648]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5cd17c5-dbc0-485d-887c-04fdf02d848a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.753315] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 890.753315] env[61648]: value = "session[5278a596-0338-d9e4-40ee-e8205fef64a3]5297ec40-e8e2-4d4d-255a-3016a4dec707" [ 890.753315] env[61648]: _type = "Task" [ 890.753315] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.760703] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5297ec40-e8e2-4d4d-255a-3016a4dec707, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.858018] env[61648]: DEBUG nova.scheduler.client.report [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.002496] env[61648]: INFO nova.scheduler.client.report [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Deleted allocations for instance b3ae23d8-e28e-460c-b9a3-4744f81f39ec [ 891.207837] env[61648]: DEBUG nova.network.neutron [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.263918] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': session[5278a596-0338-d9e4-40ee-e8205fef64a3]5297ec40-e8e2-4d4d-255a-3016a4dec707, 'name': SearchDatastore_Task, 'duration_secs': 0.008271} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.264390] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.264806] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e4adb624-e900-4838-a5c5-2cd0d488f458/e4adb624-e900-4838-a5c5-2cd0d488f458.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.265229] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-850dc74c-a145-47cf-a6f2-e79a1bbff3b8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.271349] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 891.271349] env[61648]: value = "task-1336706" [ 891.271349] env[61648]: _type = "Task" [ 891.271349] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.282926] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.364017] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.783s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.364017] env[61648]: ERROR nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. 
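The lockutils lines surrounding these failures (Lock "compute_resources" acquired by "...instance_claim"/"...abort_instance_claim" :: waited 21.940s, "released" ... held 1.783s) show claim and abort-claim work being serialized on a single process-local semaphore, with the waited/held timings emitted by the lockutils wrapper itself. A rough illustration of that locking pattern with oslo.concurrency follows; the toy tracker class and its method bodies are placeholders for this sketch, not nova's actual ResourceTracker:

    import time
    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    class ToyResourceTracker(object):
        # Only the locking pattern mirrors the log; the bodies are illustrative.
        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance_uuid):
            # Reserve CPU/RAM/disk for the build while holding the lock so
            # concurrent builds cannot double-count resources.
            time.sleep(0.05)
            return 'claimed %s' % instance_uuid

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, instance_uuid):
            # Give the resources back when the build fails, e.g. after a
            # PortBindingFailed re-schedule like the ones in this log.
            time.sleep(0.05)
            return 'aborted %s' % instance_uuid

    tracker = ToyResourceTracker()
    print(tracker.instance_claim('34dc9640-9b39-4e3b-b8ca-7a29ab760992'))
    print(tracker.abort_instance_claim('34dc9640-9b39-4e3b-b8ca-7a29ab760992'))
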
[ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Traceback (most recent call last): [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.driver.spawn(context, instance, image_meta, [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 891.364017] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] vm_ref = self.build_virtual_machine(instance, [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] vif_infos = vmwarevif.get_vif_info(self._session, [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] for vif in network_info: [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return self._sync_wrapper(fn, *args, **kwargs) [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.wait() [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self[:] = self._gt.wait() [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return self._exit_event.wait() [ 891.364367] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] current.throw(*self._exc) [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] result = function(*args, **kwargs) [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] return func(*args, **kwargs) [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise e [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] nwinfo = self.network_api.allocate_for_instance( [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] created_port_ids = self._update_ports_for_instance( [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 891.364701] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] with excutils.save_and_reraise_exception(): [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] self.force_reraise() [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise self.value [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] updated_port = self._update_port( [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] _ensure_no_port_binding_failure(port) [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] raise exception.PortBindingFailed(port_id=port['id']) [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] nova.exception.PortBindingFailed: Binding failed for 
port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. [ 891.365046] env[61648]: ERROR nova.compute.manager [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] [ 891.365362] env[61648]: DEBUG nova.compute.utils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 891.365786] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.489s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.369082] env[61648]: INFO nova.compute.claims [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.370646] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Build of instance 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a was re-scheduled: Binding failed for port a9db426e-3abb-46bd-864b-92ed5e4b6f9d, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 891.371283] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 891.371643] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.371958] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquired lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.372263] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.514459] env[61648]: DEBUG oslo_concurrency.lockutils [None req-3451ce93-ae6a-4cb6-8044-da72bc708e6c tempest-AttachVolumeShelveTestJSON-944550801 tempest-AttachVolumeShelveTestJSON-944550801-project-member] Lock "b3ae23d8-e28e-460c-b9a3-4744f81f39ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.086s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.712021] env[61648]: INFO nova.compute.manager [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] [instance: 34dc9640-9b39-4e3b-b8ca-7a29ab760992] Took 1.02 seconds to deallocate network for instance. [ 891.785915] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452242} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.785915] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3243eb3-32d0-4887-afc7-2030d2340206/a3243eb3-32d0-4887-afc7-2030d2340206.vmdk to [datastore2] e4adb624-e900-4838-a5c5-2cd0d488f458/e4adb624-e900-4838-a5c5-2cd0d488f458.vmdk {{(pid=61648) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.785915] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Extending root virtual disk to 1048576 {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.785915] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0caddf05-dfab-4511-b085-dd239861f222 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.792020] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 891.792020] env[61648]: value = "task-1336707" [ 891.792020] env[61648]: _type = "Task" [ 891.792020] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.797347] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336707, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.894706] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.989201] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.018881] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 892.302081] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059576} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.302081] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Extended root virtual disk {{(pid=61648) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.302081] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5244271f-9438-4ee6-8340-ccb3260efe04 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.322157] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] e4adb624-e900-4838-a5c5-2cd0d488f458/e4adb624-e900-4838-a5c5-2cd0d488f458.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.323324] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f095aa98-1e55-4a79-82f4-a6620049eb25 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.346340] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 892.346340] env[61648]: value = "task-1336708" [ 892.346340] env[61648]: _type = "Task" [ 892.346340] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.354305] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.491018] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Releasing lock "refresh_cache-4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.491350] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 892.491597] env[61648]: DEBUG nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 892.491836] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.509221] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.533734] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.538934] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d214ffc-e61c-4653-95f9-5dd95e56d643 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.548306] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35011fc-99ed-4413-b5da-9323bb11de66 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.580811] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125c1687-cbbd-4f9c-a388-5bf54b5a19d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.588089] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f1bb42-921a-4a4b-9825-f608db698f33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.601517] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.740736] env[61648]: INFO nova.scheduler.client.report [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 
tempest-ServerGroupTestJSON-1345283330-project-member] Deleted allocations for instance 34dc9640-9b39-4e3b-b8ca-7a29ab760992 [ 892.853855] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336708, 'name': ReconfigVM_Task, 'duration_secs': 0.251463} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.854173] env[61648]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Reconfigured VM instance instance-0000004f to attach disk [datastore2] e4adb624-e900-4838-a5c5-2cd0d488f458/e4adb624-e900-4838-a5c5-2cd0d488f458.vmdk or device None with type sparse {{(pid=61648) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.854762] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21ffafca-ea51-4f62-aa59-0381af0b68c0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.864022] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 892.864022] env[61648]: value = "task-1336709" [ 892.864022] env[61648]: _type = "Task" [ 892.864022] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.868992] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336709, 'name': Rename_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.012189] env[61648]: DEBUG nova.network.neutron [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.122536] env[61648]: ERROR nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [req-f5470ec3-a46d-4674-9c93-94710808207b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f5470ec3-a46d-4674-9c93-94710808207b"}]} [ 893.144873] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 893.160458] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 893.160707] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.173641] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 893.197162] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 893.248626] env[61648]: DEBUG oslo_concurrency.lockutils [None req-10cd1f5e-c7d3-49dc-afbc-5744af6a8102 tempest-ServerGroupTestJSON-1345283330 tempest-ServerGroupTestJSON-1345283330-project-member] Lock "34dc9640-9b39-4e3b-b8ca-7a29ab760992" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.572s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.376652] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336709, 'name': Rename_Task, 'duration_secs': 0.131543} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.376983] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Powering on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.377309] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8a329ed-6f19-426a-b189-c78de877b21a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.381964] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff47ca07-1a74-4678-94d4-dcaa5fdaadbd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.394642] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c77f96-0322-43e1-bb18-05a37ca927e4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.398354] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 893.398354] env[61648]: value = "task-1336710" [ 893.398354] env[61648]: _type = "Task" [ 893.398354] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.429336] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6e5f73-ed55-4156-98e0-7dcea0575692 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.435930] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336710, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.441058] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8d0b44-457a-48cf-86f4-bcdd83ec0a4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.458169] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.516366] env[61648]: INFO nova.compute.manager [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a] Took 1.02 seconds to deallocate network for instance. [ 893.752089] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.909703] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336710, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.997431] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 893.997597] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 103 to 104 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 893.997729] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 894.285094] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.415025] env[61648]: DEBUG oslo_vmware.api [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336710, 'name': PowerOnVM_Task, 'duration_secs': 0.567083} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.415025] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Powered on the VM {{(pid=61648) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.415025] env[61648]: INFO nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Took 6.33 seconds to spawn the instance on the hypervisor. 
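The exchange above — an inventory PUT rejected with 409 "placement.concurrent_update", a refresh of inventories, aggregates and traits for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, and a retried update that moves the provider generation from 103 to 104 — is the optimistic-concurrency pattern the Placement API enforces: every write must carry the generation the caller last observed. A minimal standalone sketch of that retry loop against the Placement HTTP API follows; the endpoint, token and inventory payload are placeholders for illustration, not values taken from this deployment or from Nova's report client.

# Sketch of the retry-on-conflict pattern seen in the log: PUT the inventory
# with the generation we believe is current; on a 409 placement.concurrent_update
# refresh the provider generation and try again.
import requests

PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
HEADERS = {"X-Auth-Token": "TOKEN",                     # hypothetical credentials
           "OpenStack-API-Version": "placement 1.26"}

def set_inventory(rp_uuid, inventories, retries=3):
    for _ in range(retries):
        # Fetch the provider's current generation before each attempt.
        rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            # Success: Placement bumps the generation server-side, which is the
            # 103 -> 104 transition recorded in the ProviderTree update above.
            return resp.json()
        # 409 placement.concurrent_update: another writer changed the provider;
        # loop to refresh the generation and retry.
    raise RuntimeError("inventory update kept conflicting")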
[ 894.415025] env[61648]: DEBUG nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 894.415025] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0198879e-c746-476c-9a75-1169899c3209 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.504131] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.139s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.504339] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 894.508067] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.776s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.513724] env[61648]: INFO nova.compute.claims [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.556020] env[61648]: INFO nova.scheduler.client.report [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Deleted allocations for instance 4d83ac61-454a-4c3f-b1cf-b8a1be5c501a [ 894.932303] env[61648]: INFO nova.compute.manager [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Took 33.80 seconds to build instance. [ 895.019216] env[61648]: DEBUG nova.compute.utils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 895.023651] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 895.023849] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.068799] env[61648]: DEBUG oslo_concurrency.lockutils [None req-f7d34015-75d2-4dad-8894-9ee9ed51032d tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "4d83ac61-454a-4c3f-b1cf-b8a1be5c501a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.636s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.082435] env[61648]: DEBUG nova.policy [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37668165a76144f8bc79499ddcfadf12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11eef571954e4bc3b42d8de257f5b18f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 895.399470] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Successfully created port: 38c486f9-8e74-4f48-839f-0f3377724cf1 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.436024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6f7e425c-afae-4870-a9ab-6929db1ad86e tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.591s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.524426] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.579734] env[61648]: DEBUG nova.compute.manager [None req-b5e327d6-3c4f-4c4d-8d4c-83320ad6339a tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Checking state {{(pid=61648) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 895.580565] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cc4750-b8e0-46f2-bafc-d75802abf0a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.657856] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "e4adb624-e900-4838-a5c5-2cd0d488f458" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.658220] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.658341] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "e4adb624-e900-4838-a5c5-2cd0d488f458-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.658522] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.658682] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.662873] env[61648]: INFO nova.compute.manager [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Terminating instance [ 895.665163] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "refresh_cache-e4adb624-e900-4838-a5c5-2cd0d488f458" {{(pid=61648) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.665163] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquired lock "refresh_cache-e4adb624-e900-4838-a5c5-2cd0d488f458" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.665163] env[61648]: DEBUG nova.network.neutron [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.725377] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083f3c18-9748-4441-b11b-6c2f630a646e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.735040] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4354ea37-4f33-402b-825d-b99fe2dc1d2c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.765028] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae1d32-aafc-4d8c-8e97-8dc8b7c93e07 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.772032] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e25934c-aa79-4c21-bec9-7d8ead5370cc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.784846] env[61648]: DEBUG nova.compute.provider_tree [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.096581] env[61648]: INFO nova.compute.manager [None req-b5e327d6-3c4f-4c4d-8d4c-83320ad6339a tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] instance snapshotting [ 896.097225] env[61648]: DEBUG nova.objects.instance [None req-b5e327d6-3c4f-4c4d-8d4c-83320ad6339a tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lazy-loading 'flavor' on Instance uuid e4adb624-e900-4838-a5c5-2cd0d488f458 {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.196146] env[61648]: DEBUG nova.network.neutron [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.269064] env[61648]: DEBUG nova.network.neutron [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.288934] env[61648]: DEBUG nova.scheduler.client.report [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.373017] env[61648]: DEBUG nova.compute.manager [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Received event network-changed-38c486f9-8e74-4f48-839f-0f3377724cf1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.374097] env[61648]: DEBUG nova.compute.manager [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Refreshing instance network info cache due to event network-changed-38c486f9-8e74-4f48-839f-0f3377724cf1. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 896.374489] env[61648]: DEBUG oslo_concurrency.lockutils [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] Acquiring lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.375257] env[61648]: DEBUG oslo_concurrency.lockutils [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] Acquired lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.375498] env[61648]: DEBUG nova.network.neutron [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Refreshing network info cache for port 38c486f9-8e74-4f48-839f-0f3377724cf1 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.537908] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.567486] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.567735] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.567909] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.568132] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.568285] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.568429] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.568663] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.568819] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.568984] env[61648]: DEBUG 
nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.569384] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.569607] env[61648]: DEBUG nova.virt.hardware [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.570543] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f46eac9-50e7-455e-849b-c5975c9442af {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.578907] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51980821-10d1-424d-8b49-2334559d88d7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.604192] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c1f6f7-bd50-45a7-9ff3-f2e4cfd2f1e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.621515] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282b4247-4751-4084-8d1e-d7a2953d3cfb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.775837] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Releasing lock "refresh_cache-e4adb624-e900-4838-a5c5-2cd0d488f458" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.776314] env[61648]: DEBUG nova.compute.manager [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 896.776503] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.777383] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f3b0a1-66fe-4508-b1ee-392f3baaaab7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.785726] env[61648]: ERROR nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. [ 896.785726] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.785726] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 896.785726] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 896.785726] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.785726] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.785726] env[61648]: ERROR nova.compute.manager raise self.value [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 896.785726] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 896.785726] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.785726] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 896.786704] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.786704] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 896.786704] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. 
[ 896.786704] env[61648]: ERROR nova.compute.manager [ 896.786704] env[61648]: Traceback (most recent call last): [ 896.786704] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 896.786704] env[61648]: listener.cb(fileno) [ 896.786704] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 896.786704] env[61648]: result = function(*args, **kwargs) [ 896.786704] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 896.786704] env[61648]: return func(*args, **kwargs) [ 896.786704] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 896.786704] env[61648]: raise e [ 896.786704] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.786704] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 896.786704] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 896.786704] env[61648]: created_port_ids = self._update_ports_for_instance( [ 896.786704] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 896.786704] env[61648]: with excutils.save_and_reraise_exception(): [ 896.786704] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.786704] env[61648]: self.force_reraise() [ 896.786704] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.786704] env[61648]: raise self.value [ 896.786704] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 896.786704] env[61648]: updated_port = self._update_port( [ 896.786704] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.786704] env[61648]: _ensure_no_port_binding_failure(port) [ 896.786704] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.786704] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 896.788543] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. [ 896.788543] env[61648]: Removing descriptor: 19 [ 896.788543] env[61648]: ERROR nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. 
[ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Traceback (most recent call last): [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] yield resources [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.driver.spawn(context, instance, image_meta, [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 896.788543] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] vm_ref = self.build_virtual_machine(instance, [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] for vif in network_info: [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self._sync_wrapper(fn, *args, **kwargs) [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.wait() [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self[:] = self._gt.wait() [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self._exit_event.wait() [ 896.789342] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 896.790256] env[61648]: ERROR 
nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] result = hub.switch() [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self.greenlet.switch() [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] result = function(*args, **kwargs) [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return func(*args, **kwargs) [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise e [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] nwinfo = self.network_api.allocate_for_instance( [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 896.790256] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] created_port_ids = self._update_ports_for_instance( [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] with excutils.save_and_reraise_exception(): [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.force_reraise() [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise self.value [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] updated_port = self._update_port( [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.791065] 
env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] _ensure_no_port_binding_failure(port) [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.791065] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise exception.PortBindingFailed(port_id=port['id']) [ 896.791714] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. [ 896.791714] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] [ 896.791714] env[61648]: INFO nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Terminating instance [ 896.796694] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.797544] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 896.802177] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Powering off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.802715] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquiring lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.802946] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.372s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.805657] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a67c0a0-e2f1-4d34-99d9-cea1ff770343 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.815903] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 896.815903] env[61648]: value = "task-1336711" [ 896.815903] env[61648]: _type = "Task" [ 896.815903] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.821487] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336711, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.904512] env[61648]: DEBUG nova.network.neutron [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.106085] env[61648]: DEBUG nova.network.neutron [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.132061] env[61648]: DEBUG nova.compute.manager [None req-b5e327d6-3c4f-4c4d-8d4c-83320ad6339a tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance disappeared during snapshot {{(pid=61648) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 897.303345] env[61648]: DEBUG nova.compute.manager [None req-b5e327d6-3c4f-4c4d-8d4c-83320ad6339a tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Found 0 images (rotation: 2) {{(pid=61648) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 897.312185] env[61648]: DEBUG nova.compute.utils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.313699] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 897.313857] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.333146] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336711, 'name': PowerOffVM_Task, 'duration_secs': 0.12778} completed successfully. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.333470] env[61648]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Powered off the VM {{(pid=61648) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.333640] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Unregistering the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.333887] env[61648]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a21f50d7-3abb-4fa7-a553-3b10454f4c84 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.362440] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Unregistered the VM {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.362675] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Deleting contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.362856] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Deleting the datastore file [datastore2] e4adb624-e900-4838-a5c5-2cd0d488f458 {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.363182] env[61648]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6eb4fcca-8c7c-431b-ac91-f145f140cd79 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.373312] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for the task: (returnval){ [ 897.373312] env[61648]: value = "task-1336713" [ 897.373312] env[61648]: _type = "Task" [ 897.373312] env[61648]: } to complete. {{(pid=61648) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.382675] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336713, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.420396] env[61648]: DEBUG nova.policy [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1640f7eea62d4cecb3957335c02d4e3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acae4fa055d943c4abab9264a1f1683f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.465691] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73194aee-25c4-4366-8929-e48902622cbf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.475607] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c00372-51d4-4e2d-91c7-df2419014ce7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.507192] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2c8116-5465-42a5-9d14-d9f6c75c975c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.514598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61804eba-9768-4017-841b-570890b42749 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.527701] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.609025] env[61648]: DEBUG oslo_concurrency.lockutils [req-3c1dae85-36e3-46d9-bb46-7d31c5566758 req-ddb2d025-5b7b-49cf-8313-cf6700b72d6a service nova] Releasing lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.609455] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquired lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.609644] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 
tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.817814] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 897.887721] env[61648]: DEBUG oslo_vmware.api [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Task: {'id': task-1336713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095975} completed successfully. {{(pid=61648) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.888020] env[61648]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Deleted the datastore file {{(pid=61648) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.888200] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Deleted contents of the VM from datastore datastore2 {{(pid=61648) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 897.888436] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 897.888533] env[61648]: INFO nova.compute.manager [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Took 1.11 seconds to destroy the instance on the hypervisor. [ 897.888766] env[61648]: DEBUG oslo.service.loopingcall [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.888964] env[61648]: DEBUG nova.compute.manager [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 897.889065] env[61648]: DEBUG nova.network.neutron [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 897.907635] env[61648]: DEBUG nova.network.neutron [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.053863] env[61648]: ERROR nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [req-1239deed-7547-4f40-855d-3e889c9e37a6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1239deed-7547-4f40-855d-3e889c9e37a6"}]}: nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 898.073836] env[61648]: DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 898.093173] env[61648]: DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 898.093379] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.106799] env[61648]: DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 898.125219] env[61648]: 
DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 898.129874] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.143543] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Successfully created port: d773c4c9-9656-4a2a-8e15-8f7430160d6d {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.268360] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.397093] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d37b1a2-b099-4013-8868-66e814ecdcfc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.406175] env[61648]: DEBUG nova.compute.manager [req-ffe86498-632d-4faf-a399-a6a9fe90a988 req-4c235091-72a1-45d4-91b2-f094757a7ec4 service nova] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Received event network-vif-deleted-38c486f9-8e74-4f48-839f-0f3377724cf1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.410681] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21aa02d-3c8b-4a1b-acb8-676950d851c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.417038] env[61648]: DEBUG nova.network.neutron [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.447608] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16990308-6840-4a4d-b867-bdbdc34a28fd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.457097] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d86a9a-8cf5-4294-812b-753b91fbaf05 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.474028] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 
tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.484461] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "21e36276-c4d1-4941-a216-22fee34dcb29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.484706] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "21e36276-c4d1-4941-a216-22fee34dcb29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.770901] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Releasing lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.771574] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 898.771574] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.771876] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59d702a2-906b-4271-acf1-b4c61c4c7637 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.781018] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8d3379-be40-4831-91fd-2046ac46a6d1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.806373] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b1cd355c-ea96-4ff9-aa40-6605c8b73e3b could not be found. [ 898.806794] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.807020] env[61648]: INFO nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 898.807262] env[61648]: DEBUG oslo.service.loopingcall [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.807481] env[61648]: DEBUG nova.compute.manager [-] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 898.807574] env[61648]: DEBUG nova.network.neutron [-] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.822848] env[61648]: DEBUG nova.network.neutron [-] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.830911] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 898.856796] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.857043] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.857200] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.857376] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.857516] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.857656] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.857855] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.858032] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.858278] env[61648]: DEBUG 
nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.858442] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.858655] env[61648]: DEBUG nova.virt.hardware [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.861300] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feec0c3-4ead-49b0-8e0b-0fc02bbfedb2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.870592] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e77b7f8-6f40-411d-9228-405f0a551f58 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.916793] env[61648]: INFO nova.compute.manager [-] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Took 1.03 seconds to deallocate network for instance. [ 898.987858] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 899.011518] env[61648]: DEBUG nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 899.011708] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 105 to 106 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 899.011880] env[61648]: DEBUG nova.compute.provider_tree [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.326691] env[61648]: DEBUG nova.network.neutron [-] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.424613] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.515130] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.517359] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.714s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
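The records above show the scheduler report client hitting a Placement generation conflict (HTTP 409, placement.concurrent_update) while writing inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, refreshing the provider data, and resubmitting, after which the provider generation moves from 105 to 106. The following is only a minimal, hedged sketch of that read-generation / conditional-PUT / retry-on-409 pattern against the Placement API; the endpoint URL, token, helper name and retry count are assumptions for illustration, and this is not Nova's report-client code.

    # Hedged sketch (not Nova's report client): retry a Placement inventory
    # update when the server answers 409 placement.concurrent_update, by
    # re-reading the provider generation and resubmitting.
    import requests

    PLACEMENT_URL = "http://placement.example/resource_providers"   # assumption
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",                        # assumption
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(provider_uuid, inventories, retries=3):
        url = f"{PLACEMENT_URL}/{provider_uuid}/inventories"
        for _ in range(retries):
            # Read the current provider generation so the PUT is conditional on it.
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation (as in the log above); refresh and try again.
        raise RuntimeError("gave up after repeated generation conflicts")

The conditional write on resource_provider_generation is what turns a concurrent update by another writer into the 409 seen above rather than a silent overwrite of the other writer's inventory.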
[ 899.518046] env[61648]: ERROR nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Traceback (most recent call last): [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.driver.spawn(context, instance, image_meta, [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self._vmops.spawn(context, instance, image_meta, injected_files, [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] vm_ref = self.build_virtual_machine(instance, [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] vif_infos = vmwarevif.get_vif_info(self._session, [ 899.518046] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] for vif in network_info: [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return self._sync_wrapper(fn, *args, **kwargs) [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.wait() [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self[:] = self._gt.wait() [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return self._exit_event.wait() [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 
03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] current.throw(*self._exc) [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 899.518403] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] result = function(*args, **kwargs) [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] return func(*args, **kwargs) [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise e [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] nwinfo = self.network_api.allocate_for_instance( [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] created_port_ids = self._update_ports_for_instance( [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] with excutils.save_and_reraise_exception(): [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] self.force_reraise() [ 899.518806] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise self.value [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] updated_port = self._update_port( [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] _ensure_no_port_binding_failure(port) [ 899.519190] env[61648]: ERROR 
nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] raise exception.PortBindingFailed(port_id=port['id']) [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] nova.exception.PortBindingFailed: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. [ 899.519190] env[61648]: ERROR nova.compute.manager [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] [ 899.519190] env[61648]: DEBUG nova.compute.utils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 899.523294] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.355s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.526804] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Build of instance 03f59be3-f1bb-4e3a-96ea-7b39de515397 was re-scheduled: Binding failed for port 1965ab3f-57b4-41f5-bea6-409d80b49b2a, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 899.527256] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 899.527478] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.527623] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.527776] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.780952] env[61648]: ERROR nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. 
[ 899.780952] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.780952] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 899.780952] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 899.780952] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.780952] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.780952] env[61648]: ERROR nova.compute.manager raise self.value [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 899.780952] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 899.780952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.780952] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 899.781615] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.781615] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 899.781615] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. 
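The tracebacks in this section all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure raising nova.exception.PortBindingFailed once Neutron reports the port binding as failed. As a rough illustration only, not Nova's actual source, the check amounts to inspecting the port's binding:vif_type and raising when it comes back as binding_failed; the class and function below are simplified stand-ins, and the port dict shape is assumed to match what the Neutron API returns.

    # Hedged sketch of the check the tracebacks end in: if Neutron reports the
    # port's binding:vif_type as "binding_failed", surface it as a hard error.
    # This mirrors the behaviour described by the log, not Nova's exact code.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # 'port' is a dict as returned by the Neutron port API (assumption).
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port["id"])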
[ 899.781615] env[61648]: ERROR nova.compute.manager [ 899.781615] env[61648]: Traceback (most recent call last): [ 899.781615] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 899.781615] env[61648]: listener.cb(fileno) [ 899.781615] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 899.781615] env[61648]: result = function(*args, **kwargs) [ 899.781615] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 899.781615] env[61648]: return func(*args, **kwargs) [ 899.781615] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 899.781615] env[61648]: raise e [ 899.781615] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.781615] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 899.781615] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 899.781615] env[61648]: created_port_ids = self._update_ports_for_instance( [ 899.781615] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 899.781615] env[61648]: with excutils.save_and_reraise_exception(): [ 899.781615] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.781615] env[61648]: self.force_reraise() [ 899.781615] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.781615] env[61648]: raise self.value [ 899.781615] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 899.781615] env[61648]: updated_port = self._update_port( [ 899.781615] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.781615] env[61648]: _ensure_no_port_binding_failure(port) [ 899.781615] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.781615] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 899.782412] env[61648]: nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. [ 899.782412] env[61648]: Removing descriptor: 19 [ 899.782412] env[61648]: ERROR nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. 
[ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Traceback (most recent call last): [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] yield resources [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.driver.spawn(context, instance, image_meta, [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 899.782412] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] vm_ref = self.build_virtual_machine(instance, [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] vif_infos = vmwarevif.get_vif_info(self._session, [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] for vif in network_info: [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self._sync_wrapper(fn, *args, **kwargs) [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.wait() [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self[:] = self._gt.wait() [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self._exit_event.wait() [ 899.782730] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 899.783103] env[61648]: ERROR 
nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] result = hub.switch() [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self.greenlet.switch() [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] result = function(*args, **kwargs) [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return func(*args, **kwargs) [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise e [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] nwinfo = self.network_api.allocate_for_instance( [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 899.783103] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] created_port_ids = self._update_ports_for_instance( [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] with excutils.save_and_reraise_exception(): [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.force_reraise() [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise self.value [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] updated_port = self._update_port( [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 899.783571] 
env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] _ensure_no_port_binding_failure(port) [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 899.783571] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise exception.PortBindingFailed(port_id=port['id']) [ 899.783944] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. [ 899.783944] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] [ 899.783944] env[61648]: INFO nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Terminating instance [ 899.785081] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.785081] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.785081] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.830910] env[61648]: INFO nova.compute.manager [-] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Took 1.02 seconds to deallocate network for instance. [ 899.832403] env[61648]: DEBUG nova.compute.claims [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 899.832724] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.067106] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.157236] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.235559] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de77b31d-6fe4-4f7b-aa3e-8fd88fe208a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.243759] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85ed48a-cb44-426c-adbe-d658028c0101 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.275741] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a84537f-90c2-454b-816b-3d20f5361299 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.283184] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fe16cc-9dfb-4267-aac9-b259e3de7518 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.298209] env[61648]: DEBUG nova.compute.provider_tree [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.310170] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.406147] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.438033] env[61648]: DEBUG nova.compute.manager [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Received event network-changed-d773c4c9-9656-4a2a-8e15-8f7430160d6d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.438337] env[61648]: DEBUG nova.compute.manager [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Refreshing instance network info cache due to event network-changed-d773c4c9-9656-4a2a-8e15-8f7430160d6d. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.438579] env[61648]: DEBUG oslo_concurrency.lockutils [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] Acquiring lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.664109] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-03f59be3-f1bb-4e3a-96ea-7b39de515397" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.664220] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 900.664785] env[61648]: DEBUG nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 900.664785] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.695535] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.728372] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquiring lock "e721a05d-86fc-4c0e-839a-107d34ec9cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.728608] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "e721a05d-86fc-4c0e-839a-107d34ec9cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.801513] env[61648]: DEBUG nova.scheduler.client.report [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.909173] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.909636] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 900.909795] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.910128] env[61648]: DEBUG oslo_concurrency.lockutils [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] Acquired lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.910303] env[61648]: DEBUG nova.network.neutron [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Refreshing network info cache for port d773c4c9-9656-4a2a-8e15-8f7430160d6d {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.911399] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba939ccb-6779-4a3c-97d9-186f8d35a5a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.923658] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72748f67-7638-40d5-be5c-39db2c49f762 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.952038] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3002571b-4800-48a9-84c1-68f6d3e0cc70 could not be found. [ 900.952309] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.952491] env[61648]: INFO nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Took 0.04 seconds to destroy the instance on the hypervisor. [ 900.952726] env[61648]: DEBUG oslo.service.loopingcall [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.952933] env[61648]: DEBUG nova.compute.manager [-] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 900.953047] env[61648]: DEBUG nova.network.neutron [-] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.970689] env[61648]: DEBUG nova.network.neutron [-] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.199757] env[61648]: DEBUG nova.network.neutron [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.232995] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 901.310036] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.784s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.310036] env[61648]: ERROR nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. 
[ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Traceback (most recent call last): [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.driver.spawn(context, instance, image_meta, [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 901.310036] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] vm_ref = self.build_virtual_machine(instance, [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] vif_infos = vmwarevif.get_vif_info(self._session, [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] for vif in network_info: [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return self._sync_wrapper(fn, *args, **kwargs) [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.wait() [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self[:] = self._gt.wait() [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return self._exit_event.wait() [ 901.310426] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] current.throw(*self._exc) [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] result = function(*args, **kwargs) [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] return func(*args, **kwargs) [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise e [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] nwinfo = self.network_api.allocate_for_instance( [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] created_port_ids = self._update_ports_for_instance( [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 901.310820] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] with excutils.save_and_reraise_exception(): [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] self.force_reraise() [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise self.value [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] updated_port = self._update_port( [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] _ensure_no_port_binding_failure(port) [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] raise exception.PortBindingFailed(port_id=port['id']) [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] nova.exception.PortBindingFailed: Binding failed for 
port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. [ 901.311228] env[61648]: ERROR nova.compute.manager [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] [ 901.311590] env[61648]: DEBUG nova.compute.utils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 901.311590] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.839s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.311590] env[61648]: DEBUG nova.objects.instance [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lazy-loading 'resources' on Instance uuid 32a2c7ce-2980-4eac-ad52-b8d5d67d669b {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.314309] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Build of instance d37aad4c-f4e9-40ab-a250-5dd3924f305c was re-scheduled: Binding failed for port d337f60c-a4b9-4593-97a0-7614a0295771, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 901.314309] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 901.314309] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.314309] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.314552] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.432307] env[61648]: DEBUG nova.network.neutron [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.475100] env[61648]: DEBUG nova.network.neutron [-] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.532010] env[61648]: DEBUG nova.network.neutron [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.701549] env[61648]: INFO nova.compute.manager [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 03f59be3-f1bb-4e3a-96ea-7b39de515397] Took 1.04 seconds to deallocate network for instance. [ 901.753554] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.836217] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.912700] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.969406] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0eb6c81-52f6-484c-83d5-fe7433782a18 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.977267] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4ffef0-943f-443f-959b-2b2a04f12bcf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.980347] env[61648]: INFO nova.compute.manager [-] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Took 1.03 seconds to deallocate network for instance. [ 901.982831] env[61648]: DEBUG nova.compute.claims [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 901.983011] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.009697] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41edbc0-73ad-4b55-ad34-fc22eb2f32b0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.016471] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a5ccb8-1d98-44f0-a7c9-b6d8dffc85e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.029058] env[61648]: DEBUG nova.compute.provider_tree [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.035163] env[61648]: DEBUG oslo_concurrency.lockutils [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] Releasing lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.035402] env[61648]: DEBUG nova.compute.manager [req-1a259256-a8fb-44cf-92e8-5f4dbd80fa89 req-dafe2d7b-1cd9-42d3-8dba-648a6145b06a service nova] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Received event network-vif-deleted-d773c4c9-9656-4a2a-8e15-8f7430160d6d {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.417580] 
env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-d37aad4c-f4e9-40ab-a250-5dd3924f305c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.418242] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 902.418685] env[61648]: DEBUG nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 902.418728] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.436033] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.531994] env[61648]: DEBUG nova.scheduler.client.report [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.731086] env[61648]: INFO nova.scheduler.client.report [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Deleted allocations for instance 03f59be3-f1bb-4e3a-96ea-7b39de515397 [ 902.941054] env[61648]: DEBUG nova.network.neutron [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.037086] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.039616] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.658s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.054776] env[61648]: INFO nova.scheduler.client.report [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Deleted allocations for instance 32a2c7ce-2980-4eac-ad52-b8d5d67d669b [ 903.239170] env[61648]: DEBUG oslo_concurrency.lockutils [None req-034aa073-0550-439e-aca9-602398f4b354 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "03f59be3-f1bb-4e3a-96ea-7b39de515397" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.553s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.450060] env[61648]: INFO nova.compute.manager [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: d37aad4c-f4e9-40ab-a250-5dd3924f305c] Took 1.03 seconds to deallocate network for instance. 
[ 903.562957] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0dea5d-05eb-4902-837a-c588e1672d2d tempest-ServerShowV247Test-389737757 tempest-ServerShowV247Test-389737757-project-member] Lock "32a2c7ce-2980-4eac-ad52-b8d5d67d669b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.806s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.707152] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a55bb7-e009-4a8c-9809-c5c1c8a768de {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.713493] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c84f47-d52f-485d-9608-19ee56999b48 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.751328] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2987002b-f644-4e5a-bc06-ab09158704d3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.761045] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e6bf8b-c2b5-4977-9f3a-21d8f26a1462 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.775569] env[61648]: DEBUG nova.compute.provider_tree [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.279210] env[61648]: DEBUG nova.scheduler.client.report [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.404842] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "e7c41d2a-1aed-44e9-959b-2369a8d66547" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.404842] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "e7c41d2a-1aed-44e9-959b-2369a8d66547" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.482182] env[61648]: INFO nova.scheduler.client.report [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Deleted allocations for instance d37aad4c-f4e9-40ab-a250-5dd3924f305c [ 904.783808] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.744s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.784451] env[61648]: ERROR nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Traceback (most recent call last): [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self.driver.spawn(context, instance, image_meta, [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] vm_ref = self.build_virtual_machine(instance, [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] vif_infos = vmwarevif.get_vif_info(self._session, [ 904.784451] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] for vif in network_info: [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return self._sync_wrapper(fn, *args, **kwargs) [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: 
c98567aa-d978-4b4d-9e01-25ab70246205] self.wait() [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self[:] = self._gt.wait() [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return self._exit_event.wait() [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] current.throw(*self._exc) [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 904.784782] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] result = function(*args, **kwargs) [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] return func(*args, **kwargs) [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise e [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] nwinfo = self.network_api.allocate_for_instance( [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] created_port_ids = self._update_ports_for_instance( [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] with excutils.save_and_reraise_exception(): [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] self.force_reraise() [ 904.785170] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.785621] env[61648]: ERROR nova.compute.manager 
[instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise self.value [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] updated_port = self._update_port( [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] _ensure_no_port_binding_failure(port) [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] raise exception.PortBindingFailed(port_id=port['id']) [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] nova.exception.PortBindingFailed: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. [ 904.785621] env[61648]: ERROR nova.compute.manager [instance: c98567aa-d978-4b4d-9e01-25ab70246205] [ 904.785621] env[61648]: DEBUG nova.compute.utils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 904.786398] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.278s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.787900] env[61648]: INFO nova.compute.claims [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.791026] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Build of instance c98567aa-d978-4b4d-9e01-25ab70246205 was re-scheduled: Binding failed for port eec74a3c-8cd3-4070-8d1c-1bc8628319eb, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 904.791441] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 904.791657] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.791799] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.791951] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.906149] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 904.990271] env[61648]: DEBUG oslo_concurrency.lockutils [None req-928d9e40-488a-4c5c-82d9-9262e14be351 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "d37aad4c-f4e9-40ab-a250-5dd3924f305c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.577s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.314209] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.407436] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.429899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.910248] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-c98567aa-d978-4b4d-9e01-25ab70246205" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.910477] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 905.910647] env[61648]: DEBUG nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 905.911271] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 905.930167] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.953290] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cb2a98-1ef1-46c2-bfee-b893032d29da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.962875] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddd546e-295f-4927-99cb-be78709e428e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.999660] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3adac2-f8a1-4520-bc15-9dc6576746bf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.007229] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1889b229-3b9d-49c1-bf7a-f344542379b8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.021631] env[61648]: DEBUG nova.compute.provider_tree [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.440620] env[61648]: DEBUG nova.network.neutron [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.525759] env[61648]: DEBUG nova.scheduler.client.report [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.944344] env[61648]: INFO nova.compute.manager [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: c98567aa-d978-4b4d-9e01-25ab70246205] Took 1.03 seconds to deallocate network for instance. 
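[editor's note - not part of the captured log] The resource tracker records above report an unchanged inventory payload for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 before each claim ("Claim successful on node domain-c8..."). The short sketch below is an illustration added here, not Nova or Placement code: it assumes the standard Placement capacity rule, capacity = (total - reserved) * allocation_ratio, and plugs in the exact figures from the logged inventory dict to show the headroom those claims are checked against.

# Hypothetical illustration (assumed formula, values copied from the log above).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Schedulable capacity per resource class under the assumed Placement rule.
    return {
        rc: (vals['total'] - vals['reserved']) * vals['allocation_ratio']
        for rc, vals in inv.items()
    }

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Under that assumption the node advertises roughly 192 schedulable VCPUs, ~196078 MB of RAM and 400 GB of disk, which is why the m1.nano claims in this section succeed immediately even while the builds later fail on port binding rather than on resources.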
[ 906.968846] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "88da5043-2922-4ef3-b92b-2a67894f4626" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.969098] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "88da5043-2922-4ef3-b92b-2a67894f4626" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.030100] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.030676] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 907.033205] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.499s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.034703] env[61648]: INFO nova.compute.claims [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.473686] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 907.542275] env[61648]: DEBUG nova.compute.utils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.545409] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 907.545579] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.616177] env[61648]: DEBUG nova.policy [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cf1769a604474cbd9e41252eaff77861', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e6bc9c03ef642d388cb01471f735491', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 907.885082] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Successfully created port: 8d0af576-0159-432b-a457-53de6f415b3e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.986265] env[61648]: INFO nova.scheduler.client.report [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance c98567aa-d978-4b4d-9e01-25ab70246205 [ 907.997455] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.049260] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 908.205369] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b11377a-21fb-4afe-8506-28b74eb5e0b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.214204] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406e8eed-0df8-4400-85b3-9b3fcb6c2e17 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.248064] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28e4864-831b-43e9-ab6f-ce7303669eef {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.256797] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d20297f-380c-46bb-bb74-e9eb9538dd8d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.270576] env[61648]: DEBUG nova.compute.provider_tree [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.499039] env[61648]: DEBUG oslo_concurrency.lockutils [None req-b6de8ef9-dd25-4b5a-a585-cacc3ffa8a16 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "c98567aa-d978-4b4d-9e01-25ab70246205" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.100s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.737836] env[61648]: DEBUG nova.compute.manager [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Received event network-changed-8d0af576-0159-432b-a457-53de6f415b3e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.738017] env[61648]: DEBUG nova.compute.manager [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Refreshing instance network info cache due to event network-changed-8d0af576-0159-432b-a457-53de6f415b3e. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.738194] env[61648]: DEBUG oslo_concurrency.lockutils [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] Acquiring lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.738307] env[61648]: DEBUG oslo_concurrency.lockutils [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] Acquired lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.738466] env[61648]: DEBUG nova.network.neutron [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Refreshing network info cache for port 8d0af576-0159-432b-a457-53de6f415b3e {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.773981] env[61648]: DEBUG nova.scheduler.client.report [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 908.965270] env[61648]: ERROR nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. 
[ 908.965270] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 908.965270] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 908.965270] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 908.965270] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 908.965270] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 908.965270] env[61648]: ERROR nova.compute.manager raise self.value [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 908.965270] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 908.965270] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 908.965270] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 908.965952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 908.965952] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 908.965952] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. 
[ 908.965952] env[61648]: ERROR nova.compute.manager [ 908.965952] env[61648]: Traceback (most recent call last): [ 908.965952] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 908.965952] env[61648]: listener.cb(fileno) [ 908.965952] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 908.965952] env[61648]: result = function(*args, **kwargs) [ 908.965952] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 908.965952] env[61648]: return func(*args, **kwargs) [ 908.965952] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 908.965952] env[61648]: raise e [ 908.965952] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 908.965952] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 908.965952] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 908.965952] env[61648]: created_port_ids = self._update_ports_for_instance( [ 908.965952] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 908.965952] env[61648]: with excutils.save_and_reraise_exception(): [ 908.965952] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 908.965952] env[61648]: self.force_reraise() [ 908.965952] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 908.965952] env[61648]: raise self.value [ 908.965952] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 908.965952] env[61648]: updated_port = self._update_port( [ 908.965952] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 908.965952] env[61648]: _ensure_no_port_binding_failure(port) [ 908.965952] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 908.965952] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 908.967037] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. [ 908.967037] env[61648]: Removing descriptor: 19 [ 909.061089] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 909.084452] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.084694] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.084847] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.085038] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.085188] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.085333] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.085540] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.085695] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.085859] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c 
tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.086337] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.086614] env[61648]: DEBUG nova.virt.hardware [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.087517] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaae8e2-4c7a-4d27-8cc0-6cefb6ca70c1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.095293] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523d9a84-7e47-48e4-a994-698c6374ae8b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.108511] env[61648]: ERROR nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. 
[ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Traceback (most recent call last): [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] yield resources [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.driver.spawn(context, instance, image_meta, [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] vm_ref = self.build_virtual_machine(instance, [ 909.108511] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] for vif in network_info: [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return self._sync_wrapper(fn, *args, **kwargs) [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.wait() [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self[:] = self._gt.wait() [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return self._exit_event.wait() [ 909.108999] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 909.108999] env[61648]: ERROR 
nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] current.throw(*self._exc) [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] result = function(*args, **kwargs) [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return func(*args, **kwargs) [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise e [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] nwinfo = self.network_api.allocate_for_instance( [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] created_port_ids = self._update_ports_for_instance( [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] with excutils.save_and_reraise_exception(): [ 909.109503] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.force_reraise() [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise self.value [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] updated_port = self._update_port( [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] _ensure_no_port_binding_failure(port) [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise exception.PortBindingFailed(port_id=port['id']) [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. [ 909.109990] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] [ 909.109990] env[61648]: INFO nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Terminating instance [ 909.111404] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquiring lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.256305] env[61648]: DEBUG nova.network.neutron [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.278613] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.279175] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.281859] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.997s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.286018] env[61648]: INFO nova.compute.claims [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.353972] env[61648]: DEBUG nova.network.neutron [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.514444] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "e9389e15-e3af-416d-83f6-800af5a6aecf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.514752] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "e9389e15-e3af-416d-83f6-800af5a6aecf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.787346] env[61648]: DEBUG nova.compute.utils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 909.790542] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 909.790705] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.827883] env[61648]: DEBUG nova.policy [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab719482a69c4ba2b0725bb68a05930c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e97d3c0049d747fe80907ef09f3ed754', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 909.856151] env[61648]: DEBUG oslo_concurrency.lockutils [req-9903c9e1-0100-4742-8332-2795723edc07 req-f47f3808-a0a1-4722-850e-4b1b63ed702c service nova] Releasing lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.856507] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquired lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.856682] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.018765] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.090158] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Successfully created port: 15e1d73c-5798-4865-8d50-0b5eef74fbed {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.291888] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 910.385305] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.472776] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62246f65-b8e8-4848-a06e-d06e91f236ce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.481961] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4d1c8c-d698-49b7-a92b-83532536b796 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.486674] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.517288] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a2577a-7060-419a-b769-b5a1d76db5c5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.529532] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7734d3-8cdb-46d0-b051-707a5bc77dfd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.547731] env[61648]: DEBUG nova.compute.provider_tree [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.548443] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.818262] env[61648]: DEBUG nova.compute.manager [req-b67930c6-6bc6-4939-8858-22ba9814a76c req-90c668d5-ca26-4593-ac17-d681b6a1453f service nova] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Received event network-vif-deleted-8d0af576-0159-432b-a457-53de6f415b3e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.992029] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Releasing lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.992029] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c 
tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 910.992029] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.992029] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e7df7d4-2a77-4d69-890b-ccd0fad2b0e2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.000159] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7c22eb-29c6-40fa-b21d-952eebc2149e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.023703] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e could not be found. [ 911.024135] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.024458] env[61648]: INFO nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 911.024871] env[61648]: DEBUG oslo.service.loopingcall [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.025223] env[61648]: DEBUG nova.compute.manager [-] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 911.025412] env[61648]: DEBUG nova.network.neutron [-] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.043010] env[61648]: DEBUG nova.network.neutron [-] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.053027] env[61648]: DEBUG nova.scheduler.client.report [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.101937] env[61648]: ERROR nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. [ 911.101937] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.101937] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 911.101937] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 911.101937] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.101937] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.101937] env[61648]: ERROR nova.compute.manager raise self.value [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 911.101937] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 911.101937] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.101937] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 911.102469] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.102469] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 911.102469] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. 
[ 911.102469] env[61648]: ERROR nova.compute.manager [ 911.102469] env[61648]: Traceback (most recent call last): [ 911.102469] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 911.102469] env[61648]: listener.cb(fileno) [ 911.102469] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 911.102469] env[61648]: result = function(*args, **kwargs) [ 911.102469] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 911.102469] env[61648]: return func(*args, **kwargs) [ 911.102469] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.102469] env[61648]: raise e [ 911.102469] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.102469] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 911.102469] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 911.102469] env[61648]: created_port_ids = self._update_ports_for_instance( [ 911.102469] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 911.102469] env[61648]: with excutils.save_and_reraise_exception(): [ 911.102469] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.102469] env[61648]: self.force_reraise() [ 911.102469] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.102469] env[61648]: raise self.value [ 911.102469] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 911.102469] env[61648]: updated_port = self._update_port( [ 911.102469] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.102469] env[61648]: _ensure_no_port_binding_failure(port) [ 911.102469] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.102469] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 911.103475] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. [ 911.103475] env[61648]: Removing descriptor: 19 [ 911.311039] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 911.336024] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.336024] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.336024] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.336785] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.337087] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.340017] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.340017] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.340017] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.340017] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] 
Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.340017] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.340246] env[61648]: DEBUG nova.virt.hardware [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.340246] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d2893d-541e-405f-b195-742879c760b2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.348408] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef85c82-4338-4642-81cb-a841d2f9f142 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.364097] env[61648]: ERROR nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Traceback (most recent call last): [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] yield resources [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.driver.spawn(context, instance, image_meta, [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] vm_ref = self.build_virtual_machine(instance, [ 911.364097] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 911.364417] env[61648]: ERROR 
nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] for vif in network_info: [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return self._sync_wrapper(fn, *args, **kwargs) [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.wait() [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self[:] = self._gt.wait() [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return self._exit_event.wait() [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 911.364417] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] current.throw(*self._exc) [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] result = function(*args, **kwargs) [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return func(*args, **kwargs) [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise e [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] nwinfo = self.network_api.allocate_for_instance( [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] created_port_ids = self._update_ports_for_instance( [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] 
File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] with excutils.save_and_reraise_exception(): [ 911.364733] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.force_reraise() [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise self.value [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] updated_port = self._update_port( [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] _ensure_no_port_binding_failure(port) [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise exception.PortBindingFailed(port_id=port['id']) [ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. 
[ 911.365087] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] [ 911.367085] env[61648]: INFO nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Terminating instance [ 911.369210] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.369210] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.369210] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.545946] env[61648]: DEBUG nova.network.neutron [-] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.556084] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.556886] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 911.560326] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.136s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.560692] env[61648]: DEBUG nova.objects.instance [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lazy-loading 'resources' on Instance uuid e4adb624-e900-4838-a5c5-2cd0d488f458 {{(pid=61648) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.885823] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.975895] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.048342] env[61648]: INFO nova.compute.manager [-] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Took 1.02 seconds to deallocate network for instance. [ 912.050735] env[61648]: DEBUG nova.compute.claims [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 912.050913] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.062336] env[61648]: DEBUG nova.compute.utils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.067023] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 912.067023] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.105839] env[61648]: DEBUG nova.policy [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 912.231343] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a883eef-2ad1-4c9f-bad5-db8faff0e6ef {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.239197] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119c7bd6-7c34-481d-b412-7f3da770ccb3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.275676] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a82a837-6653-4818-85f6-d778938b380a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.282805] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166831e4-c254-4f54-8a30-b43156d34066 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.295822] env[61648]: DEBUG nova.compute.provider_tree [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.408285] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Successfully created port: 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.478582] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.479088] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 
tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 912.479261] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.479560] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11d1e464-f7bd-495e-9c8b-ba3ee342b350 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.488888] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97d2d7d-741d-40ef-981a-f1f76f79babb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.510077] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6c2f92c0-1346-4c9a-aa96-168b0987bb2f could not be found. [ 912.510309] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.510605] env[61648]: INFO nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 912.510745] env[61648]: DEBUG oslo.service.loopingcall [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.510985] env[61648]: DEBUG nova.compute.manager [-] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.511079] env[61648]: DEBUG nova.network.neutron [-] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.526163] env[61648]: DEBUG nova.network.neutron [-] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.570025] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 912.798596] env[61648]: DEBUG nova.scheduler.client.report [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.848558] env[61648]: DEBUG nova.compute.manager [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Received event network-changed-15e1d73c-5798-4865-8d50-0b5eef74fbed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.848758] env[61648]: DEBUG nova.compute.manager [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Refreshing instance network info cache due to event network-changed-15e1d73c-5798-4865-8d50-0b5eef74fbed. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 912.848972] env[61648]: DEBUG oslo_concurrency.lockutils [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] Acquiring lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.849122] env[61648]: DEBUG oslo_concurrency.lockutils [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] Acquired lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.849278] env[61648]: DEBUG nova.network.neutron [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Refreshing network info cache for port 15e1d73c-5798-4865-8d50-0b5eef74fbed {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.027872] env[61648]: DEBUG nova.network.neutron [-] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.245380] env[61648]: ERROR nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. 
[ 913.245380] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 913.245380] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 913.245380] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 913.245380] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 913.245380] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 913.245380] env[61648]: ERROR nova.compute.manager raise self.value [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 913.245380] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 913.245380] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 913.245380] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 913.245816] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 913.245816] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 913.245816] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. 
[ 913.245816] env[61648]: ERROR nova.compute.manager [ 913.245816] env[61648]: Traceback (most recent call last): [ 913.245816] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 913.245816] env[61648]: listener.cb(fileno) [ 913.245816] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 913.245816] env[61648]: result = function(*args, **kwargs) [ 913.245816] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 913.245816] env[61648]: return func(*args, **kwargs) [ 913.245816] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 913.245816] env[61648]: raise e [ 913.245816] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 913.245816] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 913.245816] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 913.245816] env[61648]: created_port_ids = self._update_ports_for_instance( [ 913.245816] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 913.245816] env[61648]: with excutils.save_and_reraise_exception(): [ 913.245816] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 913.245816] env[61648]: self.force_reraise() [ 913.245816] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 913.245816] env[61648]: raise self.value [ 913.245816] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 913.245816] env[61648]: updated_port = self._update_port( [ 913.245816] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 913.245816] env[61648]: _ensure_no_port_binding_failure(port) [ 913.245816] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 913.245816] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 913.246533] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. 
[ 913.246533] env[61648]: Removing descriptor: 19 [ 913.303655] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.310602] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.795s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.312266] env[61648]: INFO nova.compute.claims [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.337976] env[61648]: INFO nova.scheduler.client.report [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Deleted allocations for instance e4adb624-e900-4838-a5c5-2cd0d488f458 [ 913.368395] env[61648]: DEBUG nova.network.neutron [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.445244] env[61648]: DEBUG nova.network.neutron [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.530191] env[61648]: INFO nova.compute.manager [-] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Took 1.02 seconds to deallocate network for instance. [ 913.535085] env[61648]: DEBUG nova.compute.claims [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 913.535247] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.579271] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 913.612159] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.612421] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.612575] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.612797] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.612947] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.613103] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.613334] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.613464] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.613632] env[61648]: DEBUG nova.virt.hardware [None 
req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.613849] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.614049] env[61648]: DEBUG nova.virt.hardware [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.614937] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e36491c-6348-41e2-86b8-96dcea92e78f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.623375] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d56915-ca7f-4fae-80ee-915fbadf47d1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.637190] env[61648]: ERROR nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. 
[ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Traceback (most recent call last): [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] yield resources [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.driver.spawn(context, instance, image_meta, [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] vm_ref = self.build_virtual_machine(instance, [ 913.637190] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] vif_infos = vmwarevif.get_vif_info(self._session, [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] for vif in network_info: [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return self._sync_wrapper(fn, *args, **kwargs) [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.wait() [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self[:] = self._gt.wait() [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return self._exit_event.wait() [ 913.637831] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 913.637831] env[61648]: ERROR 
nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] current.throw(*self._exc) [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] result = function(*args, **kwargs) [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return func(*args, **kwargs) [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise e [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] nwinfo = self.network_api.allocate_for_instance( [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] created_port_ids = self._update_ports_for_instance( [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] with excutils.save_and_reraise_exception(): [ 913.638434] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.force_reraise() [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise self.value [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] updated_port = self._update_port( [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] _ensure_no_port_binding_failure(port) [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise exception.PortBindingFailed(port_id=port['id']) [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. [ 913.638981] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] [ 913.638981] env[61648]: INFO nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Terminating instance [ 913.639878] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.639994] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.640201] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.844776] env[61648]: DEBUG oslo_concurrency.lockutils [None req-fa809f43-7f6c-4629-9dbe-980e165c9762 tempest-ServersAaction247Test-722687821 tempest-ServersAaction247Test-722687821-project-member] Lock "e4adb624-e900-4838-a5c5-2cd0d488f458" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.187s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.949294] env[61648]: DEBUG oslo_concurrency.lockutils [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] Releasing lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.949559] env[61648]: DEBUG nova.compute.manager [req-6fc4f405-3127-4a1a-abf9-9455b274a2f8 req-0836087e-28de-43f2-8e38-499e2a121f3d service nova] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Received event network-vif-deleted-15e1d73c-5798-4865-8d50-0b5eef74fbed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.158125] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.261898] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.453542] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee1967f-919c-4a44-a567-ab8a0416f3ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.460796] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc38eeb-f277-4f9f-9b51-1674a61946c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.490830] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160c5f93-e4bb-4d69-b586-758110efa1e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.497846] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62db72a7-00bf-40c3-92cf-985b2e55722b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.510742] env[61648]: DEBUG nova.compute.provider_tree [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.764816] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.765298] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 914.765498] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.765807] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30de20d2-87b8-40f2-bae2-8923d016114f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.775120] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add87277-0b45-4b9a-a146-1ed49fded8df {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.795736] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3c33a19e-211a-43f6-ae79-596f1c070a76 could not be found. [ 914.795946] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.796136] env[61648]: INFO nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Took 0.03 seconds to destroy the instance on the hypervisor. [ 914.796366] env[61648]: DEBUG oslo.service.loopingcall [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.796569] env[61648]: DEBUG nova.compute.manager [-] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 914.796658] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.811243] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.875369] env[61648]: DEBUG nova.compute.manager [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Received event network-changed-40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.875495] env[61648]: DEBUG nova.compute.manager [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Refreshing instance network info cache due to event network-changed-40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 914.875713] env[61648]: DEBUG oslo_concurrency.lockutils [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] Acquiring lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.875799] env[61648]: DEBUG oslo_concurrency.lockutils [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] Acquired lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.875950] env[61648]: DEBUG nova.network.neutron [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Refreshing network info cache for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.014288] env[61648]: DEBUG nova.scheduler.client.report [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.313684] env[61648]: DEBUG nova.network.neutron [-] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.393959] env[61648]: DEBUG nova.network.neutron [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.466777] env[61648]: DEBUG nova.network.neutron [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.519390] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.519907] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.522720] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.690s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.816182] env[61648]: INFO nova.compute.manager [-] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Took 1.02 seconds to deallocate network for instance. 
[ 915.818545] env[61648]: DEBUG nova.compute.claims [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 915.818715] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.969590] env[61648]: DEBUG oslo_concurrency.lockutils [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] Releasing lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.969862] env[61648]: DEBUG nova.compute.manager [req-42082c37-841a-4285-b36b-3052eb15ea4a req-79952cfa-9d5e-4ace-9faa-d0242c730eff service nova] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Received event network-vif-deleted-40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.027676] env[61648]: DEBUG nova.compute.utils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 916.032172] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 916.032348] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.088393] env[61648]: DEBUG nova.policy [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99aee7b8e206476f86165128d056b68c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f74c8585407422bbaab2c440dce9489', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 916.180956] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8cccfb-2509-4dfd-b44e-5ab0b4dd10c2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.189889] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed57db6c-4410-40e7-951e-039ffe343108 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.219549] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2065b3d6-94ce-4e1a-97f5-65d0db84479b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.226496] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c843c2-cf49-4874-8934-63e14cf9bbfd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.240222] env[61648]: DEBUG nova.compute.provider_tree [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.377587] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Successfully created port: dc4c8a9d-47f1-4670-98aa-a5de7b14453e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.532625] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.743663] env[61648]: DEBUG nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.196128] env[61648]: DEBUG nova.compute.manager [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Received event network-changed-dc4c8a9d-47f1-4670-98aa-a5de7b14453e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.196128] env[61648]: DEBUG nova.compute.manager [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Refreshing instance network info cache due to event network-changed-dc4c8a9d-47f1-4670-98aa-a5de7b14453e. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.196433] env[61648]: DEBUG oslo_concurrency.lockutils [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] Acquiring lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.196537] env[61648]: DEBUG oslo_concurrency.lockutils [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] Acquired lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.196716] env[61648]: DEBUG nova.network.neutron [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Refreshing network info cache for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.248846] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.726s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.249501] env[61648]: ERROR nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. 
[ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Traceback (most recent call last): [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.driver.spawn(context, instance, image_meta, [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] vm_ref = self.build_virtual_machine(instance, [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.249501] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] for vif in network_info: [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self._sync_wrapper(fn, *args, **kwargs) [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.wait() [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self[:] = self._gt.wait() [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self._exit_event.wait() [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] result = hub.switch() [ 917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
917.250011] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return self.greenlet.switch() [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] result = function(*args, **kwargs) [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] return func(*args, **kwargs) [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise e [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] nwinfo = self.network_api.allocate_for_instance( [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] created_port_ids = self._update_ports_for_instance( [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] with excutils.save_and_reraise_exception(): [ 917.250569] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] self.force_reraise() [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise self.value [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] updated_port = self._update_port( [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] _ensure_no_port_binding_failure(port) [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] raise exception.PortBindingFailed(port_id=port['id']) [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] nova.exception.PortBindingFailed: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. [ 917.251152] env[61648]: ERROR nova.compute.manager [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] [ 917.251616] env[61648]: DEBUG nova.compute.utils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 917.251616] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.498s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.252893] env[61648]: INFO nova.compute.claims [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.255661] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Build of instance b1cd355c-ea96-4ff9-aa40-6605c8b73e3b was re-scheduled: Binding failed for port 38c486f9-8e74-4f48-839f-0f3377724cf1, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 917.256246] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 917.256323] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquiring lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.256470] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Acquired lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.256625] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.448338] env[61648]: ERROR nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. 
[ 917.448338] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.448338] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.448338] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.448338] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.448338] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.448338] env[61648]: ERROR nova.compute.manager raise self.value [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.448338] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 917.448338] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.448338] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 917.449072] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.449072] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 917.449072] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. 
[ 917.449072] env[61648]: ERROR nova.compute.manager [ 917.449072] env[61648]: Traceback (most recent call last): [ 917.449072] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 917.449072] env[61648]: listener.cb(fileno) [ 917.449072] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.449072] env[61648]: result = function(*args, **kwargs) [ 917.449072] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 917.449072] env[61648]: return func(*args, **kwargs) [ 917.449072] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.449072] env[61648]: raise e [ 917.449072] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.449072] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 917.449072] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.449072] env[61648]: created_port_ids = self._update_ports_for_instance( [ 917.449072] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.449072] env[61648]: with excutils.save_and_reraise_exception(): [ 917.449072] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.449072] env[61648]: self.force_reraise() [ 917.449072] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.449072] env[61648]: raise self.value [ 917.449072] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.449072] env[61648]: updated_port = self._update_port( [ 917.449072] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.449072] env[61648]: _ensure_no_port_binding_failure(port) [ 917.449072] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.449072] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 917.450052] env[61648]: nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. [ 917.450052] env[61648]: Removing descriptor: 19 [ 917.542997] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.566461] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.566708] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.566860] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.567047] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.567196] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.567342] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.567549] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.567706] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.567872] env[61648]: DEBUG nova.virt.hardware [None 
req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.568480] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.568730] env[61648]: DEBUG nova.virt.hardware [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.569633] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7d31dc-ecac-4f8c-841b-ae095731a36b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.577828] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210715ed-0a0a-49fd-b163-b9a0e194bd53 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.591545] env[61648]: ERROR nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. 
[ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Traceback (most recent call last): [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] yield resources [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.driver.spawn(context, instance, image_meta, [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] vm_ref = self.build_virtual_machine(instance, [ 917.591545] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] for vif in network_info: [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return self._sync_wrapper(fn, *args, **kwargs) [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.wait() [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self[:] = self._gt.wait() [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return self._exit_event.wait() [ 917.592106] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 917.592106] env[61648]: ERROR 
nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] current.throw(*self._exc) [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] result = function(*args, **kwargs) [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return func(*args, **kwargs) [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise e [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] nwinfo = self.network_api.allocate_for_instance( [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] created_port_ids = self._update_ports_for_instance( [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] with excutils.save_and_reraise_exception(): [ 917.592653] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.force_reraise() [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise self.value [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] updated_port = self._update_port( [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] _ensure_no_port_binding_failure(port) [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise exception.PortBindingFailed(port_id=port['id']) [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. [ 917.593287] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] [ 917.593287] env[61648]: INFO nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Terminating instance [ 917.593776] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.716172] env[61648]: DEBUG nova.network.neutron [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.775921] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.798949] env[61648]: DEBUG nova.network.neutron [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.863964] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.302516] env[61648]: DEBUG oslo_concurrency.lockutils [req-470da294-5e4a-48ff-b9ff-cddb30f94e4c req-1d190752-845b-43bb-91db-7e0e0c6a59f1 service nova] Releasing lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.303145] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquired lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.303328] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Building network info 
cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.366624] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Releasing lock "refresh_cache-b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.367419] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 918.367419] env[61648]: DEBUG nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.367419] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.393569] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.424085] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfceb3f9-a661-4df3-9492-94019676a8f0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.431732] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64232b92-1bfd-49fc-8f71-52bc804efe45 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.461135] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0461dd-5270-472d-8f40-6aae8565eb29 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.468833] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cca45bb-5009-4b08-b50a-ab03b6552e8e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.481196] env[61648]: DEBUG nova.compute.provider_tree [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.822969] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.895755] env[61648]: DEBUG nova.network.neutron [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.902937] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.984599] env[61648]: DEBUG nova.scheduler.client.report [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.225662] env[61648]: DEBUG nova.compute.manager [req-9334e2c3-3f4f-4545-a3ee-846c7798456d req-33ba340c-b934-44a5-8f48-18029353d63a service nova] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Received event network-vif-deleted-dc4c8a9d-47f1-4670-98aa-a5de7b14453e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.398716] env[61648]: INFO nova.compute.manager [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] [instance: b1cd355c-ea96-4ff9-aa40-6605c8b73e3b] Took 1.03 seconds to deallocate network for instance. [ 919.405254] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Releasing lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.405639] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 919.405825] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.406122] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06f11561-589a-4984-a192-d5c87799b6a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.416637] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10181d69-c3bd-4434-ba1e-1823899169a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.437218] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21e36276-c4d1-4941-a216-22fee34dcb29 could not be found. [ 919.437475] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.437701] env[61648]: INFO nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Took 0.03 seconds to destroy the instance on the hypervisor. [ 919.437972] env[61648]: DEBUG oslo.service.loopingcall [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.438214] env[61648]: DEBUG nova.compute.manager [-] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 919.438309] env[61648]: DEBUG nova.network.neutron [-] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.456169] env[61648]: DEBUG nova.network.neutron [-] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.489591] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.238s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.490215] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 919.493264] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.510s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.958040] env[61648]: DEBUG nova.network.neutron [-] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.997800] env[61648]: DEBUG nova.compute.utils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.001841] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 920.002017] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 920.042195] env[61648]: DEBUG nova.policy [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd12864c85b7f4ac08d65ec669738ba2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edfec0927d604abe9a75989e28e8dadc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 920.138907] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2e8497-9461-4517-9a4a-f8819ea5f8b4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.147259] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cf5ca1-b3a1-4fde-8ab2-1ed35f2fd274 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.178631] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb991da3-f27e-420a-9336-9d9e3b904608 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.185436] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1067e9b-c43b-4f5d-a689-26b1bc221274 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.896796] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Successfully created port: 1e794ac3-2f11-44a3-b311-0efc6c209043 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.901303] env[61648]: INFO nova.compute.manager [-] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Took 1.46 seconds to deallocate network for instance. [ 920.901752] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 920.908365] env[61648]: DEBUG nova.compute.claims [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 920.908365] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.915941] env[61648]: DEBUG nova.compute.provider_tree [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.929404] env[61648]: INFO nova.scheduler.client.report [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Deleted allocations for instance b1cd355c-ea96-4ff9-aa40-6605c8b73e3b [ 921.419305] env[61648]: DEBUG nova.scheduler.client.report [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.437278] env[61648]: DEBUG oslo_concurrency.lockutils [None req-06f6c305-630f-4c5f-88bd-fd7edf2217af tempest-ServerActionsTestOtherA-1490909156 tempest-ServerActionsTestOtherA-1490909156-project-member] Lock "b1cd355c-ea96-4ff9-aa40-6605c8b73e3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.779s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.583492] env[61648]: DEBUG nova.compute.manager [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Received event network-changed-1e794ac3-2f11-44a3-b311-0efc6c209043 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.583686] env[61648]: DEBUG nova.compute.manager [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Refreshing instance network info cache due to event network-changed-1e794ac3-2f11-44a3-b311-0efc6c209043. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 921.583896] env[61648]: DEBUG oslo_concurrency.lockutils [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] Acquiring lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.584958] env[61648]: DEBUG oslo_concurrency.lockutils [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] Acquired lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.584958] env[61648]: DEBUG nova.network.neutron [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Refreshing network info cache for port 1e794ac3-2f11-44a3-b311-0efc6c209043 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.791952] env[61648]: ERROR nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. [ 921.791952] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.791952] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 921.791952] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 921.791952] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.791952] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.791952] env[61648]: ERROR nova.compute.manager raise self.value [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 921.791952] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 921.791952] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.791952] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 921.792698] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.792698] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 921.792698] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. [ 921.792698] env[61648]: ERROR nova.compute.manager [ 921.792698] env[61648]: Traceback (most recent call last): [ 921.792698] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 921.792698] env[61648]: listener.cb(fileno) [ 921.792698] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 921.792698] env[61648]: result = function(*args, **kwargs) [ 921.792698] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 921.792698] env[61648]: return func(*args, **kwargs) [ 921.792698] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 921.792698] env[61648]: raise e [ 921.792698] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.792698] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 921.792698] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 921.792698] env[61648]: created_port_ids = self._update_ports_for_instance( [ 921.792698] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 921.792698] env[61648]: with excutils.save_and_reraise_exception(): [ 921.792698] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.792698] env[61648]: self.force_reraise() [ 921.792698] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.792698] env[61648]: raise self.value [ 921.792698] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 921.792698] env[61648]: updated_port = self._update_port( [ 921.792698] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.792698] env[61648]: _ensure_no_port_binding_failure(port) [ 921.792698] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.792698] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 921.793627] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. [ 921.793627] env[61648]: Removing descriptor: 16 [ 921.913037] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 921.923542] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.430s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.924173] env[61648]: ERROR nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Traceback (most recent call last): [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.driver.spawn(context, instance, image_meta, [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] vm_ref = self.build_virtual_machine(instance, [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] vif_infos = vmwarevif.get_vif_info(self._session, [ 921.924173] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] for vif in network_info: [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self._sync_wrapper(fn, *args, **kwargs) [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.wait() [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 921.924666] env[61648]: ERROR 
nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self[:] = self._gt.wait() [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self._exit_event.wait() [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] result = hub.switch() [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 921.924666] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return self.greenlet.switch() [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] result = function(*args, **kwargs) [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] return func(*args, **kwargs) [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise e [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] nwinfo = self.network_api.allocate_for_instance( [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] created_port_ids = self._update_ports_for_instance( [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] with excutils.save_and_reraise_exception(): [ 921.925163] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] self.force_reraise() [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise self.value [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] updated_port = self._update_port( [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] _ensure_no_port_binding_failure(port) [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] raise exception.PortBindingFailed(port_id=port['id']) [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] nova.exception.PortBindingFailed: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. [ 921.925505] env[61648]: ERROR nova.compute.manager [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] [ 921.925952] env[61648]: DEBUG nova.compute.utils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 921.926277] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.496s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.927529] env[61648]: INFO nova.compute.claims [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.934276] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Build of instance 3002571b-4800-48a9-84c1-68f6d3e0cc70 was re-scheduled: Binding failed for port d773c4c9-9656-4a2a-8e15-8f7430160d6d, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 921.934276] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 921.934276] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.934276] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.934426] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.951306] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 921.952323] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 921.952323] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.952323] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 921.952323] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.952323] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 921.952642] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 921.952642] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 921.952642] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 921.952790] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 921.952965] env[61648]: DEBUG nova.virt.hardware [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 921.958020] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c72a26-1809-4356-905a-56824dd7758c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.963663] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ff64df-23eb-41ba-8d11-13f0d490f9c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.980519] env[61648]: ERROR nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. 
[ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Traceback (most recent call last): [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] yield resources [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.driver.spawn(context, instance, image_meta, [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] vm_ref = self.build_virtual_machine(instance, [ 921.980519] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] for vif in network_info: [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return self._sync_wrapper(fn, *args, **kwargs) [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.wait() [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self[:] = self._gt.wait() [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return self._exit_event.wait() [ 921.980924] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 921.980924] env[61648]: ERROR 
nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] current.throw(*self._exc) [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] result = function(*args, **kwargs) [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return func(*args, **kwargs) [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise e [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] nwinfo = self.network_api.allocate_for_instance( [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] created_port_ids = self._update_ports_for_instance( [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] with excutils.save_and_reraise_exception(): [ 921.981363] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.force_reraise() [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise self.value [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] updated_port = self._update_port( [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] _ensure_no_port_binding_failure(port) [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise exception.PortBindingFailed(port_id=port['id']) [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] nova.exception.PortBindingFailed: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. [ 921.981716] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] [ 921.981716] env[61648]: INFO nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Terminating instance [ 921.982124] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquiring lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.101021] env[61648]: DEBUG nova.network.neutron [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.176895] env[61648]: DEBUG nova.network.neutron [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.451355] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.519973] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.679197] env[61648]: DEBUG oslo_concurrency.lockutils [req-92e560be-d2c3-45f9-8ca6-b83ac9ce8b43 req-89670cec-f936-4a57-a65c-2edfcaa5644d service nova] Releasing lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.680176] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquired lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.680807] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.025134] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-3002571b-4800-48a9-84c1-68f6d3e0cc70" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.025134] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 923.025134] env[61648]: DEBUG nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 923.025134] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.040893] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.078655] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4a6632-b37a-4775-b59a-f5e4854e5c04 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.092155] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce1fbf4-82b7-4e78-88c2-5caa896843d4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.126521] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb4d93b-df51-44c7-b333-913d0b90b378 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.135730] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2e6df9-1638-42cb-8f17-a806db103b7b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.151135] env[61648]: DEBUG nova.compute.provider_tree [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.206640] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.282652] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.549479] env[61648]: DEBUG nova.network.neutron [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.614405] env[61648]: DEBUG nova.compute.manager [req-2906d6ca-39bc-43c1-bf4d-332c7e7ab09d req-d6a812a4-839b-430d-a20a-8c2bfca4188d service nova] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Received event network-vif-deleted-1e794ac3-2f11-44a3-b311-0efc6c209043 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 923.657060] env[61648]: DEBUG nova.scheduler.client.report [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.792420] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Releasing lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.792420] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 923.792420] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.792420] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6b7201c-7f49-4474-8146-1fa7af5cf48b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.806020] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485e42f7-fbe6-410a-b81a-11c7455578f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.833627] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e721a05d-86fc-4c0e-839a-107d34ec9cdb could not be found. [ 923.836744] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.836744] env[61648]: INFO nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 923.836744] env[61648]: DEBUG oslo.service.loopingcall [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.836744] env[61648]: DEBUG nova.compute.manager [-] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 923.836744] env[61648]: DEBUG nova.network.neutron [-] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.852541] env[61648]: DEBUG nova.network.neutron [-] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.054020] env[61648]: INFO nova.compute.manager [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 3002571b-4800-48a9-84c1-68f6d3e0cc70] Took 1.03 seconds to deallocate network for instance. 
[ 924.160362] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.163036] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 924.164049] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.167s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.169301] env[61648]: INFO nova.compute.claims [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.356697] env[61648]: DEBUG nova.network.neutron [-] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.674989] env[61648]: DEBUG nova.compute.utils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.676465] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 924.676638] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 924.793995] env[61648]: DEBUG nova.policy [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75788746b2214f2e8c1a8884c89ddb9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd94e7e89f424d34920f0fa92acf3226', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 924.859758] env[61648]: INFO nova.compute.manager [-] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Took 1.02 seconds to deallocate network for instance. [ 924.863941] env[61648]: DEBUG nova.compute.claims [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 924.864327] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.094047] env[61648]: INFO nova.scheduler.client.report [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Deleted allocations for instance 3002571b-4800-48a9-84c1-68f6d3e0cc70 [ 925.185773] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 925.271039] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Successfully created port: cd032fd6-f80f-433e-ab30-72b6647f9fed {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.328558] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb84c4a-f2b3-4f12-a67e-b71bb7775470 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.337330] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05977546-e80c-406b-ac03-3510337c679c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.366341] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0132249c-5d4b-495d-bf88-0cabdc5e4cc7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.373341] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b5a257-800d-41c7-9079-4cfe37b4edf5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.386752] env[61648]: DEBUG nova.compute.provider_tree [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.602754] env[61648]: DEBUG oslo_concurrency.lockutils [None req-037e3912-6bdf-42e6-9ed8-ced7b713af6e tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "3002571b-4800-48a9-84c1-68f6d3e0cc70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.931s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.890318] env[61648]: DEBUG nova.scheduler.client.report [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.196521] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 926.239155] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.243019] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.243019] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.243019] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.243019] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.243019] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.243296] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.243296] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.243296] env[61648]: DEBUG 
nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.243296] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.243296] env[61648]: DEBUG nova.virt.hardware [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.243455] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f180a87b-e386-475f-9a09-e88d1c23c666 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.253830] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077e0b87-e1a6-48ec-9ec0-a8154194a996 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.400028] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.400028] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 926.403370] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.854s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.405261] env[61648]: INFO nova.compute.claims [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.623338] env[61648]: DEBUG nova.compute.manager [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Received event network-changed-cd032fd6-f80f-433e-ab30-72b6647f9fed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.623338] env[61648]: DEBUG nova.compute.manager [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Refreshing instance network info cache due to event network-changed-cd032fd6-f80f-433e-ab30-72b6647f9fed. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.623338] env[61648]: DEBUG oslo_concurrency.lockutils [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] Acquiring lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.623338] env[61648]: DEBUG oslo_concurrency.lockutils [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] Acquired lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.623338] env[61648]: DEBUG nova.network.neutron [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Refreshing network info cache for port cd032fd6-f80f-433e-ab30-72b6647f9fed {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.910608] env[61648]: DEBUG nova.compute.utils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.918370] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 926.918370] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.969905] env[61648]: ERROR nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. [ 926.969905] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 926.969905] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 926.969905] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 926.969905] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 926.969905] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 926.969905] env[61648]: ERROR nova.compute.manager raise self.value [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 926.969905] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 926.969905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 926.969905] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 926.970400] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 926.970400] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 926.970400] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. 
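The traceback above bottoms out in _ensure_no_port_binding_failure, which inspects the port record returned by Neutron and raises PortBindingFailed when the binding did not succeed. A minimal, self-contained sketch of that check follows; the PortBindingFailed class and the 'binding_failed' vif_type value are stand-ins inferred from the log messages, not imports from Nova.

# Sketch of the port-binding check at the bottom of the traceback above.
# The exception class is a stand-in; the real one lives in nova.exception.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding with a special vif_type (assumed here to
    # be the string 'binding_failed', based on the behaviour in this log).
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# The failing port from this log, shaped like a Neutron port dict.
port = {'id': 'cd032fd6-f80f-433e-ab30-72b6647f9fed',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)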
[ 926.970400] env[61648]: ERROR nova.compute.manager [ 926.970400] env[61648]: Traceback (most recent call last): [ 926.970400] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 926.970400] env[61648]: listener.cb(fileno) [ 926.970400] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 926.970400] env[61648]: result = function(*args, **kwargs) [ 926.970400] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 926.970400] env[61648]: return func(*args, **kwargs) [ 926.970400] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 926.970400] env[61648]: raise e [ 926.970400] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 926.970400] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 926.970400] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 926.970400] env[61648]: created_port_ids = self._update_ports_for_instance( [ 926.970400] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 926.970400] env[61648]: with excutils.save_and_reraise_exception(): [ 926.970400] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 926.970400] env[61648]: self.force_reraise() [ 926.970400] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 926.970400] env[61648]: raise self.value [ 926.970400] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 926.970400] env[61648]: updated_port = self._update_port( [ 926.970400] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 926.970400] env[61648]: _ensure_no_port_binding_failure(port) [ 926.970400] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 926.970400] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 926.971185] env[61648]: nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. [ 926.971185] env[61648]: Removing descriptor: 16 [ 926.971185] env[61648]: ERROR nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. 
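The spawn path only sees this error once it first touches network_info: allocation runs asynchronously in a greenthread, and the stored exception is re-raised when the caller waits on the result (the _sync_wrapper()/wait() frames in the per-instance traceback that follows). A rough analogy using the standard library instead of eventlet, only to show the fail-later-at-wait-time shape:

# Analogy only: the log uses eventlet greenthreads; a Future plays the same
# role here of deferring the exception until the result is actually needed.
from concurrent.futures import ThreadPoolExecutor


def allocate_network():
    # Stand-in for network_api.allocate_for_instance(), failing as in the log.
    raise RuntimeError("Binding failed for port "
                       "cd032fd6-f80f-433e-ab30-72b6647f9fed")


with ThreadPoolExecutor(max_workers=1) as pool:
    network_info = pool.submit(allocate_network)  # started early, in background
    # ... block device mappings are built here; no error has surfaced yet ...
    try:
        for vif in network_info.result():  # first use of the network info
            pass
    except RuntimeError as exc:
        print("spawn fails only now:", exc)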
[ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Traceback (most recent call last): [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] yield resources [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.driver.spawn(context, instance, image_meta, [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self._vmops.spawn(context, instance, image_meta, injected_files, [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 926.971185] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] vm_ref = self.build_virtual_machine(instance, [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] vif_infos = vmwarevif.get_vif_info(self._session, [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] for vif in network_info: [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self._sync_wrapper(fn, *args, **kwargs) [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.wait() [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self[:] = self._gt.wait() [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self._exit_event.wait() [ 926.971525] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 926.971880] env[61648]: ERROR 
nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] result = hub.switch() [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self.greenlet.switch() [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] result = function(*args, **kwargs) [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return func(*args, **kwargs) [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise e [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] nwinfo = self.network_api.allocate_for_instance( [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 926.971880] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] created_port_ids = self._update_ports_for_instance( [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] with excutils.save_and_reraise_exception(): [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.force_reraise() [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise self.value [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] updated_port = self._update_port( [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 926.972266] 
env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] _ensure_no_port_binding_failure(port) [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 926.972266] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise exception.PortBindingFailed(port_id=port['id']) [ 926.972556] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. [ 926.972556] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] [ 926.972556] env[61648]: INFO nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Terminating instance [ 926.974270] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.977385] env[61648]: DEBUG nova.policy [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '876b46a0e3c542eb9e267f0e0615123c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ccacb6024de431092dd0610c5ca38cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.148383] env[61648]: DEBUG nova.network.neutron [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.280404] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Successfully created port: 21583970-0c3e-4369-b110-f8170f23c9fc {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.285720] env[61648]: DEBUG nova.network.neutron [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.414926] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 927.565710] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426aed26-7635-46ee-983b-5a84095f2471 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.578927] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5239ba5e-536b-414e-b7da-1061c80bf726 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.615764] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb2ac63-8511-4a27-a84f-112c5b8e08e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.623301] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8498c7-9bf0-4527-8bfb-f3cd2a416ffa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.642363] env[61648]: DEBUG nova.compute.provider_tree [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.792724] env[61648]: DEBUG oslo_concurrency.lockutils [req-8081940c-8c5b-4492-b1e5-e27a8cba94e0 req-d5ba7df8-ff7b-48dd-8375-15de78b68588 service nova] Releasing lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.792724] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.792724] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 
tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.146363] env[61648]: DEBUG nova.scheduler.client.report [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.255468] env[61648]: DEBUG nova.compute.manager [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Received event network-changed-21583970-0c3e-4369-b110-f8170f23c9fc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.256896] env[61648]: DEBUG nova.compute.manager [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Refreshing instance network info cache due to event network-changed-21583970-0c3e-4369-b110-f8170f23c9fc. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 928.257154] env[61648]: DEBUG oslo_concurrency.lockutils [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] Acquiring lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.257320] env[61648]: DEBUG oslo_concurrency.lockutils [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] Acquired lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.257481] env[61648]: DEBUG nova.network.neutron [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Refreshing network info cache for port 21583970-0c3e-4369-b110-f8170f23c9fc {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.311805] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.408721] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.427558] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 928.453211] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquiring lock "6e3e6e83-93dc-4e63-9955-a3519e6a4df1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.453486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "6e3e6e83-93dc-4e63-9955-a3519e6a4df1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.469353] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.469598] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.469740] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
928.469913] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.470612] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.471106] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.471106] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.471262] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.471467] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.472634] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.472894] env[61648]: DEBUG nova.virt.hardware [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.473748] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4239c568-a1aa-4f8c-b853-b47f0c86d4a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.480268] env[61648]: ERROR nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. 
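The "failed network setup after 1 attempt(s)" wording comes from a retry wrapper around the Neutron allocation; in this run a single attempt was made before the error was re-raised (the "raise e" frame at manager.py:2011 in the tracebacks). A rough sketch of that retry shape, with the attempt budget and back-off chosen here purely for illustration:

import time


def allocate_with_retries(allocate, attempts=1, delay=1.0):
    # Illustrative retry wrapper: give up and re-raise once the attempt
    # budget is spent, mirroring the "after N attempt(s)" log line above.
    for attempt in range(1, attempts + 1):
        try:
            return allocate()
        except Exception:
            if attempt == attempts:
                print("Instance failed network setup after %d attempt(s)"
                      % attempt)
                raise
            time.sleep(delay)

With a budget of one attempt, as apparently in effect here, the first PortBindingFailed is immediately fatal to the build.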
[ 928.480268] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 928.480268] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 928.480268] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 928.480268] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 928.480268] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 928.480268] env[61648]: ERROR nova.compute.manager raise self.value [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 928.480268] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 928.480268] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 928.480268] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 928.480718] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 928.480718] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 928.480718] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. 
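Both tracebacks pass through oslo_utils.excutils.save_and_reraise_exception(), a context manager used inside an except block so that cleanup can run before the original error is re-raised (the force_reraise() and "raise self.value" frames above). A simplified stand-in written with contextlib, so the snippet stands alone instead of importing oslo.utils:

import contextlib
import sys


@contextlib.contextmanager
def save_and_reraise_exception():
    # Simplified stand-in: save the exception currently being handled,
    # let the with-body do its cleanup, then re-raise the saved error.
    exc_type, exc_value, exc_tb = sys.exc_info()
    try:
        yield
    finally:
        if exc_value is not None:
            raise exc_value.with_traceback(exc_tb)


def update_port(port_id):
    raise ValueError("Binding failed for port %s" % port_id)


created_ports = ['21583970-0c3e-4369-b110-f8170f23c9fc']
try:
    try:
        update_port(created_ports[0])
    except Exception:
        with save_and_reraise_exception():
            # Cleanup (e.g. removing ports created so far) runs here before
            # the original exception reaches the caller.
            created_ports.clear()
except ValueError as exc:
    print('re-raised:', exc)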
[ 928.480718] env[61648]: ERROR nova.compute.manager [ 928.480718] env[61648]: Traceback (most recent call last): [ 928.480718] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 928.480718] env[61648]: listener.cb(fileno) [ 928.480718] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 928.480718] env[61648]: result = function(*args, **kwargs) [ 928.480718] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 928.480718] env[61648]: return func(*args, **kwargs) [ 928.480718] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 928.480718] env[61648]: raise e [ 928.480718] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 928.480718] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 928.480718] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 928.480718] env[61648]: created_port_ids = self._update_ports_for_instance( [ 928.480718] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 928.480718] env[61648]: with excutils.save_and_reraise_exception(): [ 928.480718] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 928.480718] env[61648]: self.force_reraise() [ 928.480718] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 928.480718] env[61648]: raise self.value [ 928.480718] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 928.480718] env[61648]: updated_port = self._update_port( [ 928.480718] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 928.480718] env[61648]: _ensure_no_port_binding_failure(port) [ 928.480718] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 928.480718] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 928.481565] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. [ 928.481565] env[61648]: Removing descriptor: 19 [ 928.485155] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850cf8ca-919b-4071-8278-4267b0531689 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.505377] env[61648]: ERROR nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. 
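The rest of this section then walks through the cleanup arc that follows a failed spawn: the manager logs "Terminating instance", takes the instance's refresh_cache lock, asks the driver to destroy a VM that was never actually created (the InstanceNotFound warning is tolerated), and finally deallocates the Neutron resources. A schematic sketch of that sequence, not Nova's code; every helper name below is assumed for illustration:

# Schematic only: mirrors the order of the log records that follow
# (terminate -> destroy -> tolerate InstanceNotFound -> deallocate network).
class InstanceNotFound(Exception):
    pass


def destroy_on_hypervisor(instance_uuid):
    # The VM was never built, so the backend lookup fails, matching the
    # "Instance does not exist on backend" warning later in the log.
    raise InstanceNotFound(instance_uuid)


def deallocate_network(instance_uuid):
    print("Deallocating network for instance", instance_uuid)


def cleanup_failed_build(instance_uuid):
    print("Terminating instance", instance_uuid)
    try:
        destroy_on_hypervisor(instance_uuid)
    except InstanceNotFound:
        # Treated as success: there is nothing on the hypervisor to remove.
        print("Instance does not exist on backend; continuing cleanup")
    deallocate_network(instance_uuid)


cleanup_failed_build('88da5043-2922-4ef3-b92b-2a67894f4626')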
[ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Traceback (most recent call last): [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] yield resources [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.driver.spawn(context, instance, image_meta, [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] vm_ref = self.build_virtual_machine(instance, [ 928.505377] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] vif_infos = vmwarevif.get_vif_info(self._session, [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] for vif in network_info: [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return self._sync_wrapper(fn, *args, **kwargs) [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.wait() [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self[:] = self._gt.wait() [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return self._exit_event.wait() [ 928.505917] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 928.505917] env[61648]: ERROR 
nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] current.throw(*self._exc) [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] result = function(*args, **kwargs) [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return func(*args, **kwargs) [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise e [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] nwinfo = self.network_api.allocate_for_instance( [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] created_port_ids = self._update_ports_for_instance( [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] with excutils.save_and_reraise_exception(): [ 928.506269] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.force_reraise() [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise self.value [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] updated_port = self._update_port( [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] _ensure_no_port_binding_failure(port) [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise exception.PortBindingFailed(port_id=port['id']) [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. [ 928.506623] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] [ 928.506623] env[61648]: INFO nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Terminating instance [ 928.509054] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.650827] env[61648]: DEBUG nova.compute.manager [req-6bb8d9c7-96ed-4849-9467-efce5339a645 req-cb5a094d-c73a-4994-85f0-7b10fe10c46d service nova] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Received event network-vif-deleted-cd032fd6-f80f-433e-ab30-72b6647f9fed {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.651687] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.656023] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.656908] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.605s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.755606] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "5984ee2f-103a-4999-9229-34c4d21779f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.755606] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "5984ee2f-103a-4999-9229-34c4d21779f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.777128] env[61648]: DEBUG nova.network.neutron [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.871193] env[61648]: DEBUG nova.network.neutron [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.912906] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.913443] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 928.913707] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.913935] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f04341fb-6c53-4aae-8578-b36bfac6a0e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.923814] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07345d74-4248-418f-bacd-fa632bd78571 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.944468] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e7c41d2a-1aed-44e9-959b-2369a8d66547 could not be found. [ 928.944698] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.944879] env[61648]: INFO nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Took 0.03 seconds to destroy the instance on the hypervisor. [ 928.945130] env[61648]: DEBUG oslo.service.loopingcall [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.945424] env[61648]: DEBUG nova.compute.manager [-] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 928.945522] env[61648]: DEBUG nova.network.neutron [-] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.960558] env[61648]: DEBUG nova.network.neutron [-] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.961803] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.157425] env[61648]: DEBUG nova.compute.utils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.158842] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 929.159013] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 929.212150] env[61648]: DEBUG nova.policy [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 929.257491] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.304692] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f112b0-4fb5-47ab-889d-8e9f53df6e32 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.313141] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac956c7a-89d1-4b30-bf34-ad0c299de224 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.352034] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d755685-2361-449d-a994-4ee4722084c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.360236] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58345017-8998-4fe4-bfb6-6d20bb2b35b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.374859] env[61648]: DEBUG nova.compute.provider_tree [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.376464] env[61648]: DEBUG oslo_concurrency.lockutils [req-34a9ecf4-6fb2-4d3b-be7e-3db374da7576 req-6e3139d6-baf4-47c4-87e2-2542fb248650 service nova] Releasing lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.376978] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.377174] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.468882] env[61648]: DEBUG nova.network.neutron [-] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.496217] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.555681] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] 
Successfully created port: eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.665023] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 929.787390] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.879086] env[61648]: DEBUG nova.scheduler.client.report [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.909088] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.971603] env[61648]: INFO nova.compute.manager [-] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Took 1.03 seconds to deallocate network for instance. 
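Annotation (not part of the captured log): the oslo_concurrency.lockutils entries above ("Acquiring lock ... by ...", "acquired ... waited N.NNNs", "released ... held N.NNNs") all come from the same lock helper. Below is a minimal, hedged sketch of that acquire/wait/release pattern using the real lockutils.lock context manager from oslo.concurrency; the lock name and instance UUID are copied from the log purely for illustration, and this is not Nova's own resource-tracker code.

# Hedged sketch of the lock pattern reported in the log, using oslo.concurrency's
# lockutils.lock context manager (an in-process lock with default arguments).
# Illustration only; Nova's ResourceTracker wraps this differently.
import time
from oslo_concurrency import lockutils

def claim_resources(instance_uuid):
    start = time.monotonic()
    # Corresponds to: Lock "compute_resources" acquired by "...instance_claim"
    # :: waited N.NNNs
    with lockutils.lock('compute_resources'):
        waited = time.monotonic() - start
        print('acquired compute_resources after waiting %.3fs for %s'
              % (waited, instance_uuid))
        # ... resource-tracker work would happen here while the lock is held ...
    # Corresponds to: Lock "compute_resources" "released" ... :: held N.NNNs
    print('released compute_resources')

claim_resources('5984ee2f-103a-4999-9229-34c4d21779f1')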
[ 929.974824] env[61648]: DEBUG nova.compute.claims [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 929.975015] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.980368] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.298473] env[61648]: DEBUG nova.compute.manager [req-d222afce-0666-4666-beb4-7f1e11717b57 req-67209c69-2eaa-46b2-86c9-69b6a15172fb service nova] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Received event network-vif-deleted-21583970-0c3e-4369-b110-f8170f23c9fc {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.389258] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.730s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.389258] env[61648]: ERROR nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. 
[ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Traceback (most recent call last): [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.driver.spawn(context, instance, image_meta, [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 930.389258] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] vm_ref = self.build_virtual_machine(instance, [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] for vif in network_info: [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return self._sync_wrapper(fn, *args, **kwargs) [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.wait() [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self[:] = self._gt.wait() [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return self._exit_event.wait() [ 930.389735] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] current.throw(*self._exc) [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] result = function(*args, **kwargs) [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] return func(*args, **kwargs) [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise e [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] nwinfo = self.network_api.allocate_for_instance( [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] created_port_ids = self._update_ports_for_instance( [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 930.390093] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] with excutils.save_and_reraise_exception(): [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] self.force_reraise() [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise self.value [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] updated_port = self._update_port( [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] _ensure_no_port_binding_failure(port) [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] raise exception.PortBindingFailed(port_id=port['id']) [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] nova.exception.PortBindingFailed: Binding failed for 
port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. [ 930.390433] env[61648]: ERROR nova.compute.manager [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] [ 930.390758] env[61648]: DEBUG nova.compute.utils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 930.392593] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.854s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.395199] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Build of instance 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e was re-scheduled: Binding failed for port 8d0af576-0159-432b-a457-53de6f415b3e, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 930.395819] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 930.396200] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquiring lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.396464] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Acquired lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.396624] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.482929] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.483530] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 
tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 930.483787] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.484165] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eda9ed91-fe67-479e-8061-17a685e5aa3e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.495831] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7601ea1-dd65-42bb-8e39-f70bb83897d6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.523884] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88da5043-2922-4ef3-b92b-2a67894f4626 could not be found. [ 930.524089] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.524304] env[61648]: INFO nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Took 0.04 seconds to destroy the instance on the hypervisor. [ 930.524552] env[61648]: DEBUG oslo.service.loopingcall [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.524760] env[61648]: DEBUG nova.compute.manager [-] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 930.524853] env[61648]: DEBUG nova.network.neutron [-] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.541540] env[61648]: DEBUG nova.network.neutron [-] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.673852] env[61648]: ERROR nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. [ 930.673852] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.673852] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 930.673852] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 930.673852] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.673852] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.673852] env[61648]: ERROR nova.compute.manager raise self.value [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 930.673852] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 930.673852] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.673852] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 930.674464] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 930.674464] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 930.674464] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. 
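Annotation (not part of the captured log): every PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294. The sketch below approximates that check; the assumption that Neutron signals the failure through a binding:vif_type of 'binding_failed' is mine and may differ from the exact Nova source, and the local exception class stands in for nova.exception.PortBindingFailed so the snippet runs without Nova installed.

# Hedged sketch only: approximates the check the tracebacks above terminate in.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def _ensure_no_port_binding_failure(port):
    # Assumption: a failed binding is reported via the port's binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# A port dict shaped like the one Neutron returned for eb6d1a4a-... would
# produce the same message seen in the ERROR lines above.
try:
    _ensure_no_port_binding_failure(
        {'id': 'eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)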
[ 930.674464] env[61648]: ERROR nova.compute.manager [ 930.674464] env[61648]: Traceback (most recent call last): [ 930.674464] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 930.674464] env[61648]: listener.cb(fileno) [ 930.674464] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 930.674464] env[61648]: result = function(*args, **kwargs) [ 930.674464] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 930.674464] env[61648]: return func(*args, **kwargs) [ 930.674464] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.674464] env[61648]: raise e [ 930.674464] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.674464] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 930.674464] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 930.674464] env[61648]: created_port_ids = self._update_ports_for_instance( [ 930.674464] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 930.674464] env[61648]: with excutils.save_and_reraise_exception(): [ 930.674464] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.674464] env[61648]: self.force_reraise() [ 930.674464] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.674464] env[61648]: raise self.value [ 930.674464] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 930.674464] env[61648]: updated_port = self._update_port( [ 930.674464] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.674464] env[61648]: _ensure_no_port_binding_failure(port) [ 930.674464] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 930.674464] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 930.675734] env[61648]: nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. [ 930.675734] env[61648]: Removing descriptor: 19 [ 930.677315] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 930.704480] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.704837] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.705008] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.705200] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.705540] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.705540] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.705694] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.705848] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.706021] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 
tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.706390] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.706621] env[61648]: DEBUG nova.virt.hardware [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.707524] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b74756-efa6-4899-8653-f5fbd1f929ed {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.716565] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdc0647-2787-4a43-aa6b-32cb9b49febc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.731759] env[61648]: ERROR nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. 
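Annotation (not part of the captured log): the nova.virt.hardware lines above walk a 1-vCPU m1.nano flavor with no explicit limits through CPU topology selection and arrive at a single VirtCPUTopology(cores=1,sockets=1,threads=1). The sketch below shows the enumeration idea in simplified form under that no-constraint assumption; real nova/virt/hardware.py applies additional flavor and image constraints not modeled here.

# Hedged sketch: enumerate candidate CPU topologies for a vCPU count, limited
# to the unconstrained case logged above. Not the actual hardware.py logic.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Any (sockets, cores, threads) whose product equals the vCPU count and
    # stays within the per-dimension limits is a candidate.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# For 1 vCPU with limits 65536:65536:65536 this yields exactly one candidate,
# matching the "Got 1 possible topologies" line above.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]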
[ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Traceback (most recent call last): [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] yield resources [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.driver.spawn(context, instance, image_meta, [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] vm_ref = self.build_virtual_machine(instance, [ 930.731759] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] vif_infos = vmwarevif.get_vif_info(self._session, [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] for vif in network_info: [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return self._sync_wrapper(fn, *args, **kwargs) [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.wait() [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self[:] = self._gt.wait() [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return self._exit_event.wait() [ 930.732176] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 930.732176] env[61648]: ERROR 
nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] current.throw(*self._exc) [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] result = function(*args, **kwargs) [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return func(*args, **kwargs) [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise e [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] nwinfo = self.network_api.allocate_for_instance( [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] created_port_ids = self._update_ports_for_instance( [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] with excutils.save_and_reraise_exception(): [ 930.732579] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.force_reraise() [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise self.value [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] updated_port = self._update_port( [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] _ensure_no_port_binding_failure(port) [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise exception.PortBindingFailed(port_id=port['id']) [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. [ 930.732966] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] [ 930.732966] env[61648]: INFO nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Terminating instance [ 930.734700] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.734908] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.735129] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.919654] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.993127] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.046131] env[61648]: DEBUG nova.network.neutron [-] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.075087] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e94efd0-c838-4305-90ef-679145267830 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.081404] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e569ff4d-c9f5-41a4-8003-58cf30411f13 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.114124] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1019d1cc-c831-487d-85b5-c299ca44ae4f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.121210] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25641e1-104b-4847-a57d-b2594c5fd6fa {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.134970] env[61648]: DEBUG nova.compute.provider_tree [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.256034] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.335251] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.501272] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Releasing lock "refresh_cache-8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.501272] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 931.501272] env[61648]: DEBUG nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 931.501272] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.515626] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.548405] env[61648]: INFO nova.compute.manager [-] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Took 1.02 seconds to deallocate network for instance. 
[ 931.550702] env[61648]: DEBUG nova.compute.claims [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 931.551044] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.639089] env[61648]: DEBUG nova.scheduler.client.report [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.843057] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.843057] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 931.843057] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.843057] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a0cef22-a900-4289-8154-33a36031409c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.856016] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d9ffac-1995-4c92-a237-6fea5e5c1ba1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.873270] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9389e15-e3af-416d-83f6-800af5a6aecf could not be found. 
[ 931.873790] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.874143] env[61648]: INFO nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 931.874530] env[61648]: DEBUG oslo.service.loopingcall [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.874901] env[61648]: DEBUG nova.compute.manager [-] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 931.875128] env[61648]: DEBUG nova.network.neutron [-] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.890853] env[61648]: DEBUG nova.network.neutron [-] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.018904] env[61648]: DEBUG nova.network.neutron [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.149960] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.761s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.151032] env[61648]: ERROR nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. 
[ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Traceback (most recent call last): [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.driver.spawn(context, instance, image_meta, [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] vm_ref = self.build_virtual_machine(instance, [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 932.151032] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] for vif in network_info: [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return self._sync_wrapper(fn, *args, **kwargs) [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.wait() [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self[:] = self._gt.wait() [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return self._exit_event.wait() [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] current.throw(*self._exc) [ 932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
932.151618] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] result = function(*args, **kwargs) [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] return func(*args, **kwargs) [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise e [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] nwinfo = self.network_api.allocate_for_instance( [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] created_port_ids = self._update_ports_for_instance( [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] with excutils.save_and_reraise_exception(): [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] self.force_reraise() [ 932.152171] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise self.value [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] updated_port = self._update_port( [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] _ensure_no_port_binding_failure(port) [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] raise exception.PortBindingFailed(port_id=port['id']) [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] nova.exception.PortBindingFailed: Binding failed for 
port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. [ 932.152722] env[61648]: ERROR nova.compute.manager [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] [ 932.154883] env[61648]: DEBUG nova.compute.utils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 932.158015] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.337s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.158499] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Build of instance 6c2f92c0-1346-4c9a-aa96-168b0987bb2f was re-scheduled: Binding failed for port 15e1d73c-5798-4865-8d50-0b5eef74fbed, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 932.159129] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 932.159480] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquiring lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.159756] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Acquired lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.160051] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.322450] env[61648]: DEBUG nova.compute.manager [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Received event network-changed-eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.322622] env[61648]: DEBUG nova.compute.manager [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Refreshing 
instance network info cache due to event network-changed-eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.322826] env[61648]: DEBUG oslo_concurrency.lockutils [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] Acquiring lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.322965] env[61648]: DEBUG oslo_concurrency.lockutils [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] Acquired lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.323137] env[61648]: DEBUG nova.network.neutron [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Refreshing network info cache for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.395183] env[61648]: DEBUG nova.network.neutron [-] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.521811] env[61648]: INFO nova.compute.manager [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] [instance: 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e] Took 1.02 seconds to deallocate network for instance. [ 932.680675] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.768016] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.840961] env[61648]: DEBUG nova.network.neutron [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.893981] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16ed116-2346-41c4-b7eb-621e3665c3e6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.898226] env[61648]: INFO nova.compute.manager [-] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Took 1.02 seconds to deallocate network for instance. 
[ 932.902128] env[61648]: DEBUG nova.compute.claims [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 932.902304] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.903233] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b53247-cb32-4624-8127-a884b8a3df24 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.932481] env[61648]: DEBUG nova.network.neutron [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.934012] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490a0a17-e93f-4a87-b999-7272d3d35c9a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.942174] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefa60e1-ea3d-4530-8dab-f30b52693e5e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.956360] env[61648]: DEBUG nova.compute.provider_tree [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.270636] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Releasing lock "refresh_cache-6c2f92c0-1346-4c9a-aa96-168b0987bb2f" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.270884] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 933.271082] env[61648]: DEBUG nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 933.271269] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.286505] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.437568] env[61648]: DEBUG oslo_concurrency.lockutils [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] Releasing lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.437823] env[61648]: DEBUG nova.compute.manager [req-3003857e-58c0-4670-9268-20ead0b72355 req-120e2c78-881c-4aa4-a0f1-5a6f4e187ee9 service nova] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Received event network-vif-deleted-eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.458985] env[61648]: DEBUG nova.scheduler.client.report [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.550430] env[61648]: INFO nova.scheduler.client.report [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Deleted allocations for instance 8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e [ 933.788914] env[61648]: DEBUG nova.network.neutron [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.964194] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.809s 
{{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.964884] env[61648]: ERROR nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Traceback (most recent call last): [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.driver.spawn(context, instance, image_meta, [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] vm_ref = self.build_virtual_machine(instance, [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] vif_infos = vmwarevif.get_vif_info(self._session, [ 933.964884] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] for vif in network_info: [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return self._sync_wrapper(fn, *args, **kwargs) [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.wait() [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self[:] = self._gt.wait() [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return 
self._exit_event.wait() [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] current.throw(*self._exc) [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 933.965235] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] result = function(*args, **kwargs) [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] return func(*args, **kwargs) [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise e [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] nwinfo = self.network_api.allocate_for_instance( [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] created_port_ids = self._update_ports_for_instance( [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] with excutils.save_and_reraise_exception(): [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] self.force_reraise() [ 933.965579] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise self.value [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] updated_port = self._update_port( [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 
3c33a19e-211a-43f6-ae79-596f1c070a76] _ensure_no_port_binding_failure(port) [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] raise exception.PortBindingFailed(port_id=port['id']) [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] nova.exception.PortBindingFailed: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. [ 933.965922] env[61648]: ERROR nova.compute.manager [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] [ 933.965922] env[61648]: DEBUG nova.compute.utils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 933.966899] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.060s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.970234] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Build of instance 3c33a19e-211a-43f6-ae79-596f1c070a76 was re-scheduled: Binding failed for port 40e3b1d4-7b8b-42ef-8868-a6113dc4ee5e, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 933.970715] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 933.970955] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.971119] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.971297] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.058312] env[61648]: DEBUG oslo_concurrency.lockutils [None req-5d0bb40e-9359-4ccd-ae44-ba7ef956fd1c tempest-ServerTagsTestJSON-1439209647 tempest-ServerTagsTestJSON-1439209647-project-member] Lock "8e0dd5f2-945f-41ea-83b9-fbf69c3c8b3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 73.536s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.292290] env[61648]: INFO nova.compute.manager [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] [instance: 6c2f92c0-1346-4c9a-aa96-168b0987bb2f] Took 1.02 seconds to deallocate network for instance. 
[ 934.382843] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.383066] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.383339] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 934.383339] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 934.497306] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.586686] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.643091] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb010e4-026a-4e60-994c-51f3cbc1b0ce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.651289] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd490bbd-ba7e-49d3-9ff2-91b4347fc55d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.682138] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b11a19-0a29-4d0c-8a2a-a38f53abd23b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.689353] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bb5715-1813-4590-b16c-c1a0c88092f6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.702644] env[61648]: DEBUG nova.compute.provider_tree [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.887773] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Skipping network cache update for instance because it is Building. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 934.887954] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 934.888104] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 934.888239] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Skipping network cache update for instance because it is Building. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 934.888364] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 934.888577] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.888739] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.888894] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.889057] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.889222] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.889452] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.889522] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 934.889641] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.091472] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-3c33a19e-211a-43f6-ae79-596f1c070a76" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.091472] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 935.091472] env[61648]: DEBUG nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 935.091472] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.108277] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.209665] env[61648]: DEBUG nova.scheduler.client.report [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 935.333105] env[61648]: INFO nova.scheduler.client.report [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Deleted allocations for instance 6c2f92c0-1346-4c9a-aa96-168b0987bb2f [ 935.393818] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.614271] env[61648]: DEBUG nova.network.neutron [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.714016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.747s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.715234] env[61648]: ERROR nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. 
[ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Traceback (most recent call last): [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.driver.spawn(context, instance, image_meta, [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] vm_ref = self.build_virtual_machine(instance, [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] vif_infos = vmwarevif.get_vif_info(self._session, [ 935.715234] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] for vif in network_info: [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return self._sync_wrapper(fn, *args, **kwargs) [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.wait() [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self[:] = self._gt.wait() [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return self._exit_event.wait() [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] current.throw(*self._exc) [ 935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
935.716832] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] result = function(*args, **kwargs) [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] return func(*args, **kwargs) [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise e [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] nwinfo = self.network_api.allocate_for_instance( [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] created_port_ids = self._update_ports_for_instance( [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] with excutils.save_and_reraise_exception(): [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] self.force_reraise() [ 935.717245] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise self.value [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] updated_port = self._update_port( [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] _ensure_no_port_binding_failure(port) [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] raise exception.PortBindingFailed(port_id=port['id']) [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] nova.exception.PortBindingFailed: Binding failed for 
port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. [ 935.717584] env[61648]: ERROR nova.compute.manager [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] [ 935.720239] env[61648]: DEBUG nova.compute.utils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 935.720239] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.856s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.727627] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Build of instance 21e36276-c4d1-4941-a216-22fee34dcb29 was re-scheduled: Binding failed for port dc4c8a9d-47f1-4670-98aa-a5de7b14453e, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 935.727627] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 935.727627] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquiring lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.727627] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Acquired lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.728010] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.843833] env[61648]: DEBUG oslo_concurrency.lockutils [None req-9255bfe2-dbbf-450f-bdbe-b7f983280647 tempest-ImagesTestJSON-1404667623 tempest-ImagesTestJSON-1404667623-project-member] Lock "6c2f92c0-1346-4c9a-aa96-168b0987bb2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 64.290s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.115912] 
env[61648]: INFO nova.compute.manager [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 3c33a19e-211a-43f6-ae79-596f1c070a76] Took 1.03 seconds to deallocate network for instance. [ 936.261019] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.376678] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.386884] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baa6bad-22f1-42e3-a2ad-684e4d194cdb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.396625] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1120cdab-a488-4b4c-b32f-5340c0631fdd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.433758] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021c17ab-fff8-4b99-80c5-0e13b49f7d88 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.441123] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0c67d4-bb14-4cbf-a3f4-9bd6afb8ecae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.454744] env[61648]: DEBUG nova.compute.provider_tree [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.879492] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Releasing lock "refresh_cache-21e36276-c4d1-4941-a216-22fee34dcb29" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.879792] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 936.879901] env[61648]: DEBUG nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 936.880096] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.895669] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.958550] env[61648]: DEBUG nova.scheduler.client.report [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 937.170747] env[61648]: INFO nova.scheduler.client.report [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance 3c33a19e-211a-43f6-ae79-596f1c070a76 [ 937.398541] env[61648]: DEBUG nova.network.neutron [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.464279] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.744s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.464830] env[61648]: ERROR nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. 
[ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Traceback (most recent call last): [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.driver.spawn(context, instance, image_meta, [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] vm_ref = self.build_virtual_machine(instance, [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 937.464830] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] for vif in network_info: [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return self._sync_wrapper(fn, *args, **kwargs) [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.wait() [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self[:] = self._gt.wait() [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return self._exit_event.wait() [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] current.throw(*self._exc) [ 937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
937.465239] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] result = function(*args, **kwargs) [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] return func(*args, **kwargs) [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise e [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] nwinfo = self.network_api.allocate_for_instance( [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] created_port_ids = self._update_ports_for_instance( [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] with excutils.save_and_reraise_exception(): [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] self.force_reraise() [ 937.465702] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise self.value [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] updated_port = self._update_port( [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] _ensure_no_port_binding_failure(port) [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] raise exception.PortBindingFailed(port_id=port['id']) [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] nova.exception.PortBindingFailed: Binding failed for 
port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. [ 937.466172] env[61648]: ERROR nova.compute.manager [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] [ 937.466172] env[61648]: DEBUG nova.compute.utils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 937.466696] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.971s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.468117] env[61648]: INFO nova.compute.claims [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.470596] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Build of instance e721a05d-86fc-4c0e-839a-107d34ec9cdb was re-scheduled: Binding failed for port 1e794ac3-2f11-44a3-b311-0efc6c209043, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 937.471559] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 937.471559] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquiring lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.471559] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Acquired lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.471559] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.685515] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6102631e-9021-4822-a3d4-fefe7d44a4ab tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "3c33a19e-211a-43f6-ae79-596f1c070a76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.368s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.901301] env[61648]: INFO nova.compute.manager [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] [instance: 21e36276-c4d1-4941-a216-22fee34dcb29] Took 1.02 seconds to deallocate network for instance. [ 937.992163] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.085206] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.589503] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Releasing lock "refresh_cache-e721a05d-86fc-4c0e-839a-107d34ec9cdb" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.589725] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 938.590939] env[61648]: DEBUG nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 938.591194] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.597990] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52433ae6-042e-42ea-bd5a-1d16ae86f28c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.605560] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05021d5d-f594-4856-a3b7-8a18b469e30e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.635882] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.638879] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f925160a-928d-4ce1-b3e4-996c7c5ccb6f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.647951] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e72e62d-9197-4065-9fe7-1ee882778c55 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.662892] env[61648]: DEBUG nova.compute.provider_tree [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.932337] env[61648]: INFO nova.scheduler.client.report [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Deleted allocations for instance 21e36276-c4d1-4941-a216-22fee34dcb29 [ 939.138784] env[61648]: DEBUG nova.network.neutron [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.165568] env[61648]: DEBUG nova.scheduler.client.report [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.378502] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "62c944e1-7d39-4ef7-9994-0436008e59f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.378787] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "62c944e1-7d39-4ef7-9994-0436008e59f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.442448] env[61648]: DEBUG oslo_concurrency.lockutils [None req-d1ba4ff8-2a50-4ac1-8605-3358540f2299 
tempest-AttachVolumeTestJSON-1342409557 tempest-AttachVolumeTestJSON-1342409557-project-member] Lock "21e36276-c4d1-4941-a216-22fee34dcb29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.958s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.641476] env[61648]: INFO nova.compute.manager [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] [instance: e721a05d-86fc-4c0e-839a-107d34ec9cdb] Took 1.05 seconds to deallocate network for instance. [ 939.674709] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.675302] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 939.681104] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.894s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.685079] env[61648]: INFO nova.compute.claims [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.882514] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 940.191023] env[61648]: DEBUG nova.compute.utils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 940.191797] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 940.192168] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 940.294542] env[61648]: DEBUG nova.policy [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e4363061c3249479d35afbe89f06ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa387a8a00424cffbf191d9b37f0fa29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 940.412045] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.667670] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Successfully created port: a5115d34-f8ac-423b-933b-95f267888651 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.681909] env[61648]: INFO nova.scheduler.client.report [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Deleted allocations for instance e721a05d-86fc-4c0e-839a-107d34ec9cdb [ 940.699108] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 940.814052] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4cef18-4cab-4e83-949f-07f420e8b918 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.824216] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ba7747-b042-4292-8de3-4126d9668067 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.860023] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0edfde-fc04-4dd5-bbfc-e524d0f51808 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.865185] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced53ca5-9446-4220-bc61-6728b4efb950 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.878854] env[61648]: DEBUG nova.compute.provider_tree [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.189190] env[61648]: DEBUG oslo_concurrency.lockutils [None req-a8bcf9b9-0cf1-4e05-af07-2f5903654bd6 tempest-ServerPasswordTestJSON-1570096619 tempest-ServerPasswordTestJSON-1570096619-project-member] Lock "e721a05d-86fc-4c0e-839a-107d34ec9cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.460s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.382293] env[61648]: DEBUG nova.scheduler.client.report [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.714357] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 941.735778] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.735921] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.736122] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.736316] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.736459] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.736604] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.736809] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.736967] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.737198] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.737302] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.737529] env[61648]: DEBUG nova.virt.hardware [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.738342] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4749e35-42a8-4cfa-9007-c552a2dfbe2c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.746478] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65d102d-ec54-4d94-959c-76b608208c01 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.887360] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.887985] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 941.890953] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.916s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.395519] env[61648]: DEBUG nova.compute.utils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.402797] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 942.402797] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 942.460863] env[61648]: DEBUG nova.compute.manager [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Received event network-changed-a5115d34-f8ac-423b-933b-95f267888651 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.461052] env[61648]: DEBUG nova.compute.manager [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Refreshing instance network info cache due to event network-changed-a5115d34-f8ac-423b-933b-95f267888651. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.461271] env[61648]: DEBUG oslo_concurrency.lockutils [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] Acquiring lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.461410] env[61648]: DEBUG oslo_concurrency.lockutils [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] Acquired lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.461594] env[61648]: DEBUG nova.network.neutron [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Refreshing network info cache for port a5115d34-f8ac-423b-933b-95f267888651 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.467236] env[61648]: DEBUG nova.policy [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1640f7eea62d4cecb3957335c02d4e3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acae4fa055d943c4abab9264a1f1683f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 942.550661] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21b822a-dcce-477b-b082-d05afca01ba4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.561346] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a350f0-dc42-4747-a5a2-d6465b66ff22 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.597434] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8efa436-9912-4124-8dbb-bf5401c3fc8e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.604516] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc959d3d-b964-4344-b12d-3bb91a84c5c1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.618933] env[61648]: DEBUG nova.compute.provider_tree [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.727712] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquiring lock 
"f4d4f44d-a53a-44e1-ad72-87a0694b395c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.727949] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "f4d4f44d-a53a-44e1-ad72-87a0694b395c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.753113] env[61648]: ERROR nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. [ 942.753113] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 942.753113] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 942.753113] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 942.753113] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 942.753113] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 942.753113] env[61648]: ERROR nova.compute.manager raise self.value [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 942.753113] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 942.753113] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 942.753113] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 942.753674] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 942.753674] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 942.753674] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. 
[ 942.753674] env[61648]: ERROR nova.compute.manager [ 942.753674] env[61648]: Traceback (most recent call last): [ 942.753674] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 942.753674] env[61648]: listener.cb(fileno) [ 942.753674] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 942.753674] env[61648]: result = function(*args, **kwargs) [ 942.753674] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 942.753674] env[61648]: return func(*args, **kwargs) [ 942.753674] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 942.753674] env[61648]: raise e [ 942.753674] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 942.753674] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 942.753674] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 942.753674] env[61648]: created_port_ids = self._update_ports_for_instance( [ 942.753674] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 942.753674] env[61648]: with excutils.save_and_reraise_exception(): [ 942.753674] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 942.753674] env[61648]: self.force_reraise() [ 942.753674] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 942.753674] env[61648]: raise self.value [ 942.753674] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 942.753674] env[61648]: updated_port = self._update_port( [ 942.753674] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 942.753674] env[61648]: _ensure_no_port_binding_failure(port) [ 942.753674] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 942.753674] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 942.754671] env[61648]: nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. [ 942.754671] env[61648]: Removing descriptor: 19 [ 942.754671] env[61648]: ERROR nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. 
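Each of these tracebacks also unwinds through oslo_utils' save_and_reraise_exception inside _update_ports_for_instance. A short usage sketch of that cleanup-then-reraise pattern, assuming oslo.utils is installed; the cleanup body here is hypothetical, not Nova's:

    from oslo_utils import excutils

    def update_port():
        raise ValueError("binding failed")           # stand-in for the Neutron error

    def roll_back_created_ports():
        print("cleaning up ports created so far")    # hypothetical cleanup step

    try:
        try:
            update_port()
        except Exception:
            with excutils.save_and_reraise_exception():
                roll_back_created_ports()            # cleanup runs, then the
    except ValueError as exc:                        # original exception re-raises
        print("re-raised unchanged:", exc)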
[ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Traceback (most recent call last): [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] yield resources [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.driver.spawn(context, instance, image_meta, [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 942.754671] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] vm_ref = self.build_virtual_machine(instance, [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] vif_infos = vmwarevif.get_vif_info(self._session, [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] for vif in network_info: [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self._sync_wrapper(fn, *args, **kwargs) [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.wait() [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self[:] = self._gt.wait() [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self._exit_event.wait() [ 942.755177] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.755601] env[61648]: ERROR 
nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] result = hub.switch() [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self.greenlet.switch() [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] result = function(*args, **kwargs) [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return func(*args, **kwargs) [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise e [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] nwinfo = self.network_api.allocate_for_instance( [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 942.755601] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] created_port_ids = self._update_ports_for_instance( [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] with excutils.save_and_reraise_exception(): [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.force_reraise() [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise self.value [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] updated_port = self._update_port( [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 942.756046] 
env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] _ensure_no_port_binding_failure(port) [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 942.756046] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise exception.PortBindingFailed(port_id=port['id']) [ 942.756442] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. [ 942.756442] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] [ 942.756442] env[61648]: INFO nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Terminating instance [ 942.757562] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquiring lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.785404] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Successfully created port: 1bc6bded-8e7d-4041-ae38-d59dae37a0ae {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 942.906959] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 942.985626] env[61648]: DEBUG nova.network.neutron [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.115494] env[61648]: DEBUG nova.network.neutron [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.121970] env[61648]: DEBUG nova.scheduler.client.report [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.230240] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 943.624016] env[61648]: DEBUG oslo_concurrency.lockutils [req-65b7b2ec-c06a-4264-b8e0-ca78a367f8f9 req-21515904-6ff1-4eb7-beb2-28d74fcf4356 service nova] Releasing lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.624478] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquired lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.624667] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.630593] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.740s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.631627] env[61648]: ERROR nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. 
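For reference, the inventory reported just above for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 translates into schedulable capacity as follows. This is a worked example only, using the usual (total - reserved) * allocation_ratio formula rather than any Placement code:

    # Values copied from the inventory report above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Schedulable capacity per resource class: (total - reserved) * ratio."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}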
[ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Traceback (most recent call last): [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.driver.spawn(context, instance, image_meta, [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self._vmops.spawn(context, instance, image_meta, injected_files, [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] vm_ref = self.build_virtual_machine(instance, [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] vif_infos = vmwarevif.get_vif_info(self._session, [ 943.631627] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] for vif in network_info: [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self._sync_wrapper(fn, *args, **kwargs) [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.wait() [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self[:] = self._gt.wait() [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self._exit_event.wait() [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] result = hub.switch() [ 943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
943.632043] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return self.greenlet.switch() [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] result = function(*args, **kwargs) [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] return func(*args, **kwargs) [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise e [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] nwinfo = self.network_api.allocate_for_instance( [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] created_port_ids = self._update_ports_for_instance( [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] with excutils.save_and_reraise_exception(): [ 943.632430] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] self.force_reraise() [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise self.value [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] updated_port = self._update_port( [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] _ensure_no_port_binding_failure(port) [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] raise exception.PortBindingFailed(port_id=port['id']) [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] nova.exception.PortBindingFailed: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. [ 943.632840] env[61648]: ERROR nova.compute.manager [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] [ 943.633229] env[61648]: DEBUG nova.compute.utils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 943.633768] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.082s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.637461] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Build of instance e7c41d2a-1aed-44e9-959b-2369a8d66547 was re-scheduled: Binding failed for port cd032fd6-f80f-433e-ab30-72b6647f9fed, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 943.637905] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 943.638143] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.638293] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.638453] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.708912] env[61648]: DEBUG nova.compute.manager [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Received event network-changed-1bc6bded-8e7d-4041-ae38-d59dae37a0ae {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.709140] env[61648]: DEBUG nova.compute.manager [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Refreshing instance network info cache due to event network-changed-1bc6bded-8e7d-4041-ae38-d59dae37a0ae. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.709353] env[61648]: DEBUG oslo_concurrency.lockutils [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] Acquiring lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.709497] env[61648]: DEBUG oslo_concurrency.lockutils [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] Acquired lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.709663] env[61648]: DEBUG nova.network.neutron [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Refreshing network info cache for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.758969] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.901458] env[61648]: ERROR nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. 
[ 943.901458] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 943.901458] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 943.901458] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 943.901458] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 943.901458] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 943.901458] env[61648]: ERROR nova.compute.manager raise self.value [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 943.901458] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 943.901458] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 943.901458] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 943.901995] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 943.901995] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 943.901995] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. 
[ 943.901995] env[61648]: ERROR nova.compute.manager [ 943.901995] env[61648]: Traceback (most recent call last): [ 943.901995] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 943.901995] env[61648]: listener.cb(fileno) [ 943.901995] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 943.901995] env[61648]: result = function(*args, **kwargs) [ 943.901995] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 943.901995] env[61648]: return func(*args, **kwargs) [ 943.901995] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 943.901995] env[61648]: raise e [ 943.901995] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 943.901995] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 943.901995] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 943.901995] env[61648]: created_port_ids = self._update_ports_for_instance( [ 943.901995] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 943.901995] env[61648]: with excutils.save_and_reraise_exception(): [ 943.901995] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 943.901995] env[61648]: self.force_reraise() [ 943.901995] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 943.901995] env[61648]: raise self.value [ 943.901995] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 943.901995] env[61648]: updated_port = self._update_port( [ 943.901995] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 943.901995] env[61648]: _ensure_no_port_binding_failure(port) [ 943.901995] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 943.901995] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 943.902850] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. [ 943.902850] env[61648]: Removing descriptor: 16 [ 943.917109] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 943.942218] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.942473] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.942650] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.942868] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.943092] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.943264] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.943478] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.943638] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.943845] env[61648]: DEBUG 
nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.944071] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.944255] env[61648]: DEBUG nova.virt.hardware [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.945513] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5ed5e9-730d-40ec-9d05-2374fcc5946b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.953463] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d7fda4-1c6a-4a7c-bcfe-031cc11c0505 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.967072] env[61648]: ERROR nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. 
[ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Traceback (most recent call last): [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] yield resources [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.driver.spawn(context, instance, image_meta, [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] vm_ref = self.build_virtual_machine(instance, [ 943.967072] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] for vif in network_info: [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return self._sync_wrapper(fn, *args, **kwargs) [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.wait() [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self[:] = self._gt.wait() [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return self._exit_event.wait() [ 943.967462] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 943.967462] env[61648]: ERROR 
nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] current.throw(*self._exc) [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] result = function(*args, **kwargs) [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return func(*args, **kwargs) [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise e [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] nwinfo = self.network_api.allocate_for_instance( [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] created_port_ids = self._update_ports_for_instance( [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] with excutils.save_and_reraise_exception(): [ 943.967820] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.force_reraise() [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise self.value [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] updated_port = self._update_port( [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] _ensure_no_port_binding_failure(port) [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise exception.PortBindingFailed(port_id=port['id']) [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. [ 943.968223] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] [ 943.968223] env[61648]: INFO nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Terminating instance [ 943.969502] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.147224] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.170960] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.222767] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.245979] env[61648]: DEBUG nova.network.neutron [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.283731] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a1af3d-5d07-49dd-89ff-c277a662a6fb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.292374] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495e079e-41d2-4888-a874-d790ef363b24 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.300120] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.331237] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-e7c41d2a-1aed-44e9-959b-2369a8d66547" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.331653] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 944.332053] env[61648]: DEBUG nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 944.332382] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.335201] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccdd3e5-0425-4d32-aa58-f88e671672d2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.343957] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5000e82d-7570-4236-a2db-7fbb8b476b46 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.361075] env[61648]: DEBUG nova.compute.provider_tree [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.365054] env[61648]: DEBUG nova.network.neutron [None 
req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.393311] env[61648]: DEBUG nova.network.neutron [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.492600] env[61648]: DEBUG nova.compute.manager [req-186c0062-07a3-4c5c-b944-eb6aed856d31 req-cc1950ed-dcfc-481a-9b05-30f15daee56a service nova] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Received event network-vif-deleted-a5115d34-f8ac-423b-933b-95f267888651 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.725422] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Releasing lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.725873] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 944.726040] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.726361] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c269b5d-18ec-4e28-a496-8c8315b42ee2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.736012] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605fc09a-5cfb-4389-a2cf-988c8862a682 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.756816] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e3e6e83-93dc-4e63-9955-a3519e6a4df1 could not be found. 
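Every traceback in this capture bottoms out in _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py:294, which converts a failed Neutron binding into the PortBindingFailed errors seen above. The following is a minimal, self-contained sketch of that check, not the code from this deployment's tree: only the function name, the raised exception, and its message come from the log, while the 'binding:vif_type' condition is an assumption for illustration.

    # Hedged sketch of the check behind the tracebacks above (neutron.py:294).
    # Only the names and the error message are taken from the log; the
    # 'binding:vif_type' test is assumed for illustration and may differ
    # from the actual Nova source in use here.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding attempt failed; the spawn is
        # aborted rather than building VIF info for an unusable port.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

Because the spawn aborts before build_virtual_machine creates anything in vCenter, the later destroy finds no backing VM; that is why "Instance does not exist on backend" is logged only as a WARNING and cleanup proceeds straight to network deallocation.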
[ 944.757038] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.757226] env[61648]: INFO nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 944.757468] env[61648]: DEBUG oslo.service.loopingcall [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.757682] env[61648]: DEBUG nova.compute.manager [-] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 944.757777] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.775337] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.867794] env[61648]: DEBUG nova.scheduler.client.report [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.871442] env[61648]: DEBUG nova.network.neutron [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.895987] env[61648]: DEBUG oslo_concurrency.lockutils [req-1dabe74b-77eb-4cfd-ba4d-cbed75023a54 req-6da46db9-f613-40fb-9b63-65a69210995b service nova] Releasing lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.896429] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock 
"refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.896608] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.284446] env[61648]: DEBUG nova.network.neutron [-] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.373848] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.740s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.374483] env[61648]: ERROR nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Traceback (most recent call last): [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.driver.spawn(context, instance, image_meta, [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] vm_ref = self.build_virtual_machine(instance, [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] vif_infos = vmwarevif.get_vif_info(self._session, [ 945.374483] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] for vif in network_info: [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return self._sync_wrapper(fn, *args, **kwargs) [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.wait() [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self[:] = self._gt.wait() [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return self._exit_event.wait() [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] current.throw(*self._exc) [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 945.374904] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] result = function(*args, **kwargs) [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] return func(*args, **kwargs) [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise e [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] nwinfo = self.network_api.allocate_for_instance( [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] created_port_ids = self._update_ports_for_instance( [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] with excutils.save_and_reraise_exception(): [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] self.force_reraise() [ 945.375392] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise self.value [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] updated_port = self._update_port( [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] _ensure_no_port_binding_failure(port) [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] raise exception.PortBindingFailed(port_id=port['id']) [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] nova.exception.PortBindingFailed: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. [ 945.375824] env[61648]: ERROR nova.compute.manager [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] [ 945.375824] env[61648]: DEBUG nova.compute.utils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 945.376466] env[61648]: INFO nova.compute.manager [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: e7c41d2a-1aed-44e9-959b-2369a8d66547] Took 1.04 seconds to deallocate network for instance. [ 945.379020] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Build of instance 88da5043-2922-4ef3-b92b-2a67894f4626 was re-scheduled: Binding failed for port 21583970-0c3e-4369-b110-f8170f23c9fc, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 945.379318] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 945.379545] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquiring lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.379691] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Acquired lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.379844] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.380688] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.478s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.417575] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.516467] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.735112] env[61648]: DEBUG nova.compute.manager [req-eaa0f36d-3564-418a-9f3b-87c6259d6fb1 req-54bb330f-5ea7-4892-81aa-ce37c6fd221e service nova] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Received event network-vif-deleted-1bc6bded-8e7d-4041-ae38-d59dae37a0ae {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.786631] env[61648]: INFO nova.compute.manager [-] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Took 1.03 seconds to deallocate network for instance. 
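For triage it helps to pull every failed binding out of a capture like this one. The sketch below is illustrative only: the regular expressions mirror the record layout visible above, and the default log path is a hypothetical placeholder.

    # Illustrative triage helper: list (instance, port) pairs for every
    # "Binding failed for port ..." record in a nova-compute log capture.
    # The default file name is hypothetical; pass the real path as argv[1].
    import re
    import sys

    PORT_RE = re.compile(r"Binding failed for port ([0-9a-f-]{36})")
    INSTANCE_RE = re.compile(r"\[instance: ([0-9a-f-]{36})\]")

    def failed_bindings(lines):
        seen = set()
        for line in lines:
            port = PORT_RE.search(line)
            if not port:
                continue
            instance = INSTANCE_RE.search(line)
            pair = (instance.group(1) if instance else "?", port.group(1))
            if pair not in seen:
                seen.add(pair)
                yield pair

    if __name__ == "__main__":
        path = sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log"
        with open(path) as fh:
            for instance_id, port_id in failed_bindings(fh):
                print(f"instance {instance_id}: port {port_id} failed to bind")

Run over this section it would surface the four failing ports and the instances they belong to; the next stop in each case is the neutron server/agent log for the binding attempt, as the error message itself suggests.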
[ 945.788834] env[61648]: DEBUG nova.compute.claims [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 945.789016] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.915407] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.008493] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.010925] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b558d303-2a99-44d0-b646-e96fc0530d5e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.020355] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.020750] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 946.020937] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 946.021837] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94365af2-35e2-4c39-996c-96910f204db7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.025630] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3106ca7-ee3d-418b-89e1-689cda5849d9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.060198] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba9c48-e72a-45f1-95f7-47c745815d03 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.073171] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bae1877-677f-41b4-a0cb-69ac11d9ebd4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.080564] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62aa8ca-ec4a-45d4-bf01-78d4af289c06 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.087832] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5984ee2f-103a-4999-9229-34c4d21779f1 could not be found. [ 946.088084] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.088267] env[61648]: INFO nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Took 0.07 seconds to destroy the instance on the hypervisor. [ 946.088490] env[61648]: DEBUG oslo.service.loopingcall [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.088975] env[61648]: DEBUG nova.compute.manager [-] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 946.089089] env[61648]: DEBUG nova.network.neutron [-] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.098864] env[61648]: DEBUG nova.compute.provider_tree [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.111508] env[61648]: DEBUG nova.network.neutron [-] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.422863] env[61648]: INFO nova.scheduler.client.report [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Deleted allocations for instance e7c41d2a-1aed-44e9-959b-2369a8d66547 [ 946.516218] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Releasing lock "refresh_cache-88da5043-2922-4ef3-b92b-2a67894f4626" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.516434] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 946.516616] env[61648]: DEBUG nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 946.516781] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.538417] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.601387] env[61648]: DEBUG nova.scheduler.client.report [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.615399] env[61648]: DEBUG nova.network.neutron [-] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.931154] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0159e83b-9ce7-41be-8a3e-30cd37f207de tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "e7c41d2a-1aed-44e9-959b-2369a8d66547" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.527s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.042743] env[61648]: DEBUG nova.network.neutron [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.246241] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.727s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.246241] env[61648]: ERROR nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. 
[ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Traceback (most recent call last): [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.driver.spawn(context, instance, image_meta, [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 947.246241] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] vm_ref = self.build_virtual_machine(instance, [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] vif_infos = vmwarevif.get_vif_info(self._session, [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] for vif in network_info: [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return self._sync_wrapper(fn, *args, **kwargs) [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.wait() [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self[:] = self._gt.wait() [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return self._exit_event.wait() [ 947.250849] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] current.throw(*self._exc) [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] result = function(*args, **kwargs) [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] return func(*args, **kwargs) [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise e [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] nwinfo = self.network_api.allocate_for_instance( [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] created_port_ids = self._update_ports_for_instance( [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 947.251641] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] with excutils.save_and_reraise_exception(): [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] self.force_reraise() [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise self.value [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] updated_port = self._update_port( [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] _ensure_no_port_binding_failure(port) [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] raise exception.PortBindingFailed(port_id=port['id']) [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] nova.exception.PortBindingFailed: Binding failed for 
port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. [ 947.252075] env[61648]: ERROR nova.compute.manager [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] [ 947.252409] env[61648]: DEBUG nova.compute.utils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 947.252409] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.716s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.252409] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.252409] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 947.252409] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.699s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.252569] env[61648]: INFO nova.compute.claims [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.252569] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Build of instance e9389e15-e3af-416d-83f6-800af5a6aecf was re-scheduled: Binding failed for port eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 947.252569] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 947.252569] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.252569] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.252757] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.252757] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953ffa30-fc92-4146-a54a-61a8ea4ee168 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.252757] env[61648]: INFO nova.compute.manager [-] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Took 1.03 seconds to deallocate network for instance. 
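The traceback above shows the full failure path for instance e9389e15-e3af-416d-83f6-800af5a6aecf: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), which raises PortBindingFailed; the claim is then aborted under the "compute_resources" lock and the build is re-scheduled. The snippet below is a self-contained sketch of that final check, not the nova.network.neutron module verbatim: the exception class is a local stand-in, and the 'binding:vif_type' == 'binding_failed' condition is an assumption about how Neutron marks a port it could not bind; only the exception name, the call chain, and the port id come from the log.

class PortBindingFailed(Exception):
    # local stand-in for nova.exception.PortBindingFailed
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # If Neutron could not bind the port, it reports a sentinel vif_type
    # (assumed here to be 'binding_failed'); Nova converts that into an
    # exception, which is what aborts the spawn and triggers the re-schedule.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example with the port id from the log record at 947.246241:
try:
    ensure_no_port_binding_failure(
        {'id': 'eb6d1a4a-c1ac-4c2d-8556-3ef3ec0e7eb8',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)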
[ 947.252757] env[61648]: DEBUG nova.compute.claims [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 947.252757] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.252757] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7932ac-a5e6-4bfa-b6a8-e92b568ebbd1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.252960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcbe0d9-377b-4764-a690-f59a2f4c90b5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.252960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc13bee0-a5f5-40bf-8dd8-5497eb2ad4cd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.252960] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181257MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 947.252960] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.548699] env[61648]: INFO nova.compute.manager [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] [instance: 88da5043-2922-4ef3-b92b-2a67894f4626] Took 1.03 seconds to deallocate network for instance. [ 947.642637] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.739137] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.172025] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "00170bcb-99de-4be3-aa30-10c3a381d2ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.172342] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "00170bcb-99de-4be3-aa30-10c3a381d2ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.238197] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb65ef8-1f66-4969-949e-89e93a83e39a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.241364] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-e9389e15-e3af-416d-83f6-800af5a6aecf" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.241573] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 948.241749] env[61648]: DEBUG nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 948.241929] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.248078] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd85541-8275-41cb-b983-e5b31147e98b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.278322] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.282391] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ac7c30-2ef2-4825-9301-915d6b76f9fe {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.286873] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d250c6a3-b1cd-4a59-b012-80aa7c33016e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.300549] env[61648]: DEBUG nova.compute.provider_tree [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.574718] env[61648]: INFO nova.scheduler.client.report [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Deleted allocations for instance 88da5043-2922-4ef3-b92b-2a67894f4626 [ 948.674398] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 948.783754] env[61648]: DEBUG nova.network.neutron [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.803041] env[61648]: DEBUG nova.scheduler.client.report [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 949.086490] env[61648]: DEBUG oslo_concurrency.lockutils [None req-49d323d8-ba2a-43e3-aa30-86b44558dc92 tempest-AttachVolumeNegativeTest-291038310 tempest-AttachVolumeNegativeTest-291038310-project-member] Lock "88da5043-2922-4ef3-b92b-2a67894f4626" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.117s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.200956] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.286410] env[61648]: INFO nova.compute.manager [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: e9389e15-e3af-416d-83f6-800af5a6aecf] Took 1.04 seconds to deallocate network for instance. [ 949.307702] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.308336] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 949.314523] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.553s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.317321] env[61648]: INFO nova.compute.claims [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.827741] env[61648]: DEBUG nova.compute.utils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.829862] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 949.830078] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.898734] env[61648]: DEBUG nova.policy [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.327734] env[61648]: INFO nova.scheduler.client.report [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance e9389e15-e3af-416d-83f6-800af5a6aecf [ 950.336762] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 950.341460] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Successfully created port: 3a823114-e9e9-4617-8318-06a3b9fb425e {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.438948] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93c5bbc-c249-41e7-baac-92bf2c7aae4a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.446200] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fef3b56-be32-4736-b18c-4c911101d921 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.476531] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca394faf-dd18-45f2-b638-f26a44ce9e34 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.484132] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca53c88c-6b6d-4298-9f67-997572e5c1b9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.496148] env[61648]: DEBUG nova.compute.provider_tree [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.844494] env[61648]: DEBUG oslo_concurrency.lockutils [None req-08968861-3bdf-49d3-b37f-d5677739de1d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "e9389e15-e3af-416d-83f6-800af5a6aecf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.330s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.999228] env[61648]: DEBUG nova.scheduler.client.report [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.355956] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 951.377615] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.377904] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.378069] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.378225] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.378370] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.378516] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.378717] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.378870] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.379845] env[61648]: DEBUG nova.virt.hardware [None 
req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.380084] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.380277] env[61648]: DEBUG nova.virt.hardware [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.381156] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4eadb40-9dfc-4736-967c-02f35a76cd6e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.389482] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15414a96-5a56-4890-a858-942214b1c1a2 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.504520] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.505084] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 951.508407] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.719s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.927730] env[61648]: DEBUG nova.compute.manager [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Received event network-changed-3a823114-e9e9-4617-8318-06a3b9fb425e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.927927] env[61648]: DEBUG nova.compute.manager [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Refreshing instance network info cache due to event network-changed-3a823114-e9e9-4617-8318-06a3b9fb425e. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 951.928249] env[61648]: DEBUG oslo_concurrency.lockutils [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] Acquiring lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.928333] env[61648]: DEBUG oslo_concurrency.lockutils [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] Acquired lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.928504] env[61648]: DEBUG nova.network.neutron [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Refreshing network info cache for port 3a823114-e9e9-4617-8318-06a3b9fb425e {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.012530] env[61648]: DEBUG nova.compute.utils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.014216] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 952.018080] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.079952] env[61648]: DEBUG nova.policy [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a87cddb68e84d49ac1b44f93de2cec9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1336bbf6c1148a78c53c4ceb24e6372', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 952.116232] env[61648]: ERROR nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. 
[ 952.116232] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.116232] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 952.116232] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 952.116232] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.116232] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.116232] env[61648]: ERROR nova.compute.manager raise self.value [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 952.116232] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 952.116232] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.116232] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 952.116801] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 952.116801] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 952.116801] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. 
[ 952.116801] env[61648]: ERROR nova.compute.manager [ 952.116801] env[61648]: Traceback (most recent call last): [ 952.116801] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 952.116801] env[61648]: listener.cb(fileno) [ 952.116801] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 952.116801] env[61648]: result = function(*args, **kwargs) [ 952.116801] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 952.116801] env[61648]: return func(*args, **kwargs) [ 952.116801] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 952.116801] env[61648]: raise e [ 952.116801] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.116801] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 952.116801] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 952.116801] env[61648]: created_port_ids = self._update_ports_for_instance( [ 952.116801] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 952.116801] env[61648]: with excutils.save_and_reraise_exception(): [ 952.116801] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.116801] env[61648]: self.force_reraise() [ 952.116801] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.116801] env[61648]: raise self.value [ 952.116801] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 952.116801] env[61648]: updated_port = self._update_port( [ 952.116801] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.116801] env[61648]: _ensure_no_port_binding_failure(port) [ 952.116801] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 952.116801] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 952.118047] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. [ 952.118047] env[61648]: Removing descriptor: 14 [ 952.118047] env[61648]: ERROR nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. 
[ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Traceback (most recent call last): [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] yield resources [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.driver.spawn(context, instance, image_meta, [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 952.118047] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] vm_ref = self.build_virtual_machine(instance, [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] vif_infos = vmwarevif.get_vif_info(self._session, [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] for vif in network_info: [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self._sync_wrapper(fn, *args, **kwargs) [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.wait() [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self[:] = self._gt.wait() [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self._exit_event.wait() [ 952.118431] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 952.118828] env[61648]: ERROR 
nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] result = hub.switch() [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self.greenlet.switch() [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] result = function(*args, **kwargs) [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return func(*args, **kwargs) [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise e [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] nwinfo = self.network_api.allocate_for_instance( [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 952.118828] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] created_port_ids = self._update_ports_for_instance( [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] with excutils.save_and_reraise_exception(): [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.force_reraise() [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise self.value [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] updated_port = self._update_port( [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 952.119208] 
env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] _ensure_no_port_binding_failure(port) [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 952.119208] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise exception.PortBindingFailed(port_id=port['id']) [ 952.119600] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. [ 952.119600] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] [ 952.119600] env[61648]: INFO nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Terminating instance [ 952.119600] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.126323] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1b20a6-6762-4cdc-bc20-b074e839eaf5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.138465] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfe2a0c-2aa6-44e0-8e50-7ec301c2eb08 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.174275] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1363bc81-e6cd-498b-8de1-5066505323ae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.183161] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d808e2-5373-4bd3-987e-c9c733a0f3bd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.200351] env[61648]: DEBUG nova.compute.provider_tree [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.391220] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Successfully created port: efeeccd0-4d60-468f-9d01-ae6e44b334a3 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.420262] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring 
lock "1fe8fefb-6625-4582-bc35-a3c91f88db0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.420480] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "1fe8fefb-6625-4582-bc35-a3c91f88db0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.450798] env[61648]: DEBUG nova.network.neutron [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 952.519720] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 952.576078] env[61648]: DEBUG nova.network.neutron [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.703184] env[61648]: DEBUG nova.scheduler.client.report [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.923286] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 953.079107] env[61648]: DEBUG oslo_concurrency.lockutils [req-a2d28244-a22c-4747-8069-47f45095d988 req-03c478ce-72ef-4d59-a828-a1db610a81aa service nova] Releasing lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.079513] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.079693] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.208978] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.700s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.209675] env[61648]: ERROR nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. 
[ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Traceback (most recent call last): [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.driver.spawn(context, instance, image_meta, [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] vm_ref = self.build_virtual_machine(instance, [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] vif_infos = vmwarevif.get_vif_info(self._session, [ 953.209675] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] for vif in network_info: [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self._sync_wrapper(fn, *args, **kwargs) [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.wait() [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self[:] = self._gt.wait() [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self._exit_event.wait() [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] result = hub.switch() [ 953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
953.210081] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return self.greenlet.switch() [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] result = function(*args, **kwargs) [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] return func(*args, **kwargs) [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise e [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] nwinfo = self.network_api.allocate_for_instance( [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] created_port_ids = self._update_ports_for_instance( [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] with excutils.save_and_reraise_exception(): [ 953.210513] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] self.force_reraise() [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise self.value [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] updated_port = self._update_port( [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] _ensure_no_port_binding_failure(port) [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] raise exception.PortBindingFailed(port_id=port['id']) [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] nova.exception.PortBindingFailed: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. [ 953.210944] env[61648]: ERROR nova.compute.manager [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] [ 953.211317] env[61648]: DEBUG nova.compute.utils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 953.213351] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Build of instance 6e3e6e83-93dc-4e63-9955-a3519e6a4df1 was re-scheduled: Binding failed for port a5115d34-f8ac-423b-933b-95f267888651, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 953.214582] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 953.215155] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquiring lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.215530] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Acquired lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.215920] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.218735] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.094s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.296016] 
env[61648]: ERROR nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. [ 953.296016] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 953.296016] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 953.296016] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 953.296016] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 953.296016] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 953.296016] env[61648]: ERROR nova.compute.manager raise self.value [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 953.296016] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 953.296016] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 953.296016] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 953.296575] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 953.296575] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 953.296575] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. 
[ 953.296575] env[61648]: ERROR nova.compute.manager [ 953.296575] env[61648]: Traceback (most recent call last): [ 953.296575] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 953.296575] env[61648]: listener.cb(fileno) [ 953.296575] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 953.296575] env[61648]: result = function(*args, **kwargs) [ 953.296575] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 953.296575] env[61648]: return func(*args, **kwargs) [ 953.296575] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 953.296575] env[61648]: raise e [ 953.296575] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 953.296575] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 953.296575] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 953.296575] env[61648]: created_port_ids = self._update_ports_for_instance( [ 953.296575] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 953.296575] env[61648]: with excutils.save_and_reraise_exception(): [ 953.296575] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 953.296575] env[61648]: self.force_reraise() [ 953.296575] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 953.296575] env[61648]: raise self.value [ 953.296575] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 953.296575] env[61648]: updated_port = self._update_port( [ 953.296575] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 953.296575] env[61648]: _ensure_no_port_binding_failure(port) [ 953.296575] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 953.296575] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 953.297528] env[61648]: nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. [ 953.297528] env[61648]: Removing descriptor: 19 [ 953.449608] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.530163] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 953.555133] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 953.555395] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 953.555549] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.555725] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 953.555868] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.556033] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 953.556259] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 953.556454] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 953.556611] env[61648]: DEBUG nova.virt.hardware [None 
req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 953.556769] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 953.556935] env[61648]: DEBUG nova.virt.hardware [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 953.557848] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e2d4fc-4438-4c37-a7bd-d7342c3f239f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.566341] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af065e1-b059-44e3-a97b-db92be4c5d41 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.580173] env[61648]: ERROR nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. 
[ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Traceback (most recent call last): [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] yield resources [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.driver.spawn(context, instance, image_meta, [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] vm_ref = self.build_virtual_machine(instance, [ 953.580173] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] vif_infos = vmwarevif.get_vif_info(self._session, [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] for vif in network_info: [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return self._sync_wrapper(fn, *args, **kwargs) [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.wait() [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self[:] = self._gt.wait() [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return self._exit_event.wait() [ 953.580657] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 953.580657] env[61648]: ERROR 
nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] current.throw(*self._exc) [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] result = function(*args, **kwargs) [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return func(*args, **kwargs) [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise e [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] nwinfo = self.network_api.allocate_for_instance( [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] created_port_ids = self._update_ports_for_instance( [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] with excutils.save_and_reraise_exception(): [ 953.581106] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.force_reraise() [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise self.value [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] updated_port = self._update_port( [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] _ensure_no_port_binding_failure(port) [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise exception.PortBindingFailed(port_id=port['id']) [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. [ 953.581616] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] [ 953.581616] env[61648]: INFO nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Terminating instance [ 953.582490] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquiring lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.582651] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquired lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.582815] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.599665] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.671137] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.733727] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.802386] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad3ea3b-58f4-4eb4-ab2a-8c54d2843ca4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.809802] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.812657] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2b4d58-708c-4a79-9b89-7211da27b89f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.843087] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2922e1e4-7643-4560-8902-1f33fb6456ca {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.850244] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd610ba-ee98-46f1-8064-e96b51f249a0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.863170] env[61648]: DEBUG nova.compute.provider_tree [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.956745] env[61648]: DEBUG nova.compute.manager [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Received event network-vif-deleted-3a823114-e9e9-4617-8318-06a3b9fb425e {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.956964] env[61648]: DEBUG nova.compute.manager [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Received event network-changed-efeeccd0-4d60-468f-9d01-ae6e44b334a3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.957513] env[61648]: DEBUG nova.compute.manager [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Refreshing instance network info cache due to event network-changed-efeeccd0-4d60-468f-9d01-ae6e44b334a3. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.957718] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] Acquiring lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.115344] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.173330] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.173793] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 954.173949] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.174330] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f80befc8-12b7-4515-8b83-ca04d07fa226 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.183509] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b1e0a0-c301-45b6-9e32-c8f822c3e73c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.198075] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.209682] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62c944e1-7d39-4ef7-9994-0436008e59f6 could not be found. 
[ 954.210017] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.210225] env[61648]: INFO nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 954.210474] env[61648]: DEBUG oslo.service.loopingcall [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.210698] env[61648]: DEBUG nova.compute.manager [-] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.210794] env[61648]: DEBUG nova.network.neutron [-] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.225178] env[61648]: DEBUG nova.network.neutron [-] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.313330] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Releasing lock "refresh_cache-6e3e6e83-93dc-4e63-9955-a3519e6a4df1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.313536] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 954.313689] env[61648]: DEBUG nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.313851] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.328931] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.366279] env[61648]: DEBUG nova.scheduler.client.report [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.703638] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Releasing lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.704117] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 954.704358] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.704687] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] Acquired lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.704857] env[61648]: DEBUG nova.network.neutron [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Refreshing network info cache for port efeeccd0-4d60-468f-9d01-ae6e44b334a3 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.705983] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8b8b1d4-bb34-4d71-b557-f0435cc73e78 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.716300] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4555d88-bc48-4ad9-9be0-a66bbf0b056d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.726830] env[61648]: DEBUG nova.network.neutron [-] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.737865] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4d4f44d-a53a-44e1-ad72-87a0694b395c could not be found. [ 954.737865] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.737992] env[61648]: INFO nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 954.738232] env[61648]: DEBUG oslo.service.loopingcall [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.738682] env[61648]: DEBUG nova.compute.manager [-] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 954.738784] env[61648]: DEBUG nova.network.neutron [-] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.754573] env[61648]: DEBUG nova.network.neutron [-] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.831909] env[61648]: DEBUG nova.network.neutron [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.871331] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.653s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.871953] env[61648]: ERROR nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. 
[ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Traceback (most recent call last): [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.driver.spawn(context, instance, image_meta, [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] vm_ref = self.build_virtual_machine(instance, [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 954.871953] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] for vif in network_info: [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return self._sync_wrapper(fn, *args, **kwargs) [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.wait() [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self[:] = self._gt.wait() [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return self._exit_event.wait() [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] current.throw(*self._exc) [ 954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
954.872475] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] result = function(*args, **kwargs) [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] return func(*args, **kwargs) [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise e [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] nwinfo = self.network_api.allocate_for_instance( [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] created_port_ids = self._update_ports_for_instance( [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] with excutils.save_and_reraise_exception(): [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] self.force_reraise() [ 954.872965] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise self.value [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] updated_port = self._update_port( [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] _ensure_no_port_binding_failure(port) [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] raise exception.PortBindingFailed(port_id=port['id']) [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] nova.exception.PortBindingFailed: Binding failed for 
port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. [ 954.873420] env[61648]: ERROR nova.compute.manager [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] [ 954.873420] env[61648]: DEBUG nova.compute.utils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 954.873752] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.689s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.875490] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Build of instance 5984ee2f-103a-4999-9229-34c4d21779f1 was re-scheduled: Binding failed for port 1bc6bded-8e7d-4041-ae38-d59dae37a0ae, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 954.875885] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 954.876130] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquiring lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.876299] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Acquired lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.876460] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.223762] env[61648]: DEBUG nova.network.neutron [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.229151] env[61648]: INFO nova.compute.manager [-] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Took 1.02 seconds to deallocate network for instance. 
[ 955.234188] env[61648]: DEBUG nova.compute.claims [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 955.234451] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.256959] env[61648]: DEBUG nova.network.neutron [-] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.297718] env[61648]: DEBUG nova.network.neutron [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.334675] env[61648]: INFO nova.compute.manager [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] [instance: 6e3e6e83-93dc-4e63-9955-a3519e6a4df1] Took 1.02 seconds to deallocate network for instance. [ 955.394799] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.462040] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.760083] env[61648]: INFO nova.compute.manager [-] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Took 1.02 seconds to deallocate network for instance. 
[ 955.762650] env[61648]: DEBUG nova.compute.claims [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 955.762878] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.800678] env[61648]: DEBUG oslo_concurrency.lockutils [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] Releasing lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.801021] env[61648]: DEBUG nova.compute.manager [req-d7c68911-b10c-4eda-b181-6af6bcd20cd6 req-1c6ba127-17ee-4810-90ad-c985989db85f service nova] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Received event network-vif-deleted-efeeccd0-4d60-468f-9d01-ae6e44b334a3 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.964779] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Releasing lock "refresh_cache-5984ee2f-103a-4999-9229-34c4d21779f1" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.965033] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 955.965234] env[61648]: DEBUG nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 955.965412] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.980746] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.361548] env[61648]: INFO nova.scheduler.client.report [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Deleted allocations for instance 6e3e6e83-93dc-4e63-9955-a3519e6a4df1 [ 956.396900] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 6e3e6e83-93dc-4e63-9955-a3519e6a4df1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 956.483134] env[61648]: DEBUG nova.network.neutron [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.872690] env[61648]: DEBUG oslo_concurrency.lockutils [None req-8f6dff9f-3932-4613-a26c-60cddb9aa5a0 tempest-ServerAddressesNegativeTestJSON-1089743642 tempest-ServerAddressesNegativeTestJSON-1089743642-project-member] Lock "6e3e6e83-93dc-4e63-9955-a3519e6a4df1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 28.419s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.899737] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 5984ee2f-103a-4999-9229-34c4d21779f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 956.899934] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 62c944e1-7d39-4ef7-9994-0436008e59f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 956.900052] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance f4d4f44d-a53a-44e1-ad72-87a0694b395c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 956.986206] env[61648]: INFO nova.compute.manager [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] [instance: 5984ee2f-103a-4999-9229-34c4d21779f1] Took 1.02 seconds to deallocate network for instance. 
[ 957.403017] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 00170bcb-99de-4be3-aa30-10c3a381d2ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 957.906362] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 1fe8fefb-6625-4582-bc35-a3c91f88db0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 957.906768] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 957.906768] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 958.012358] env[61648]: INFO nova.scheduler.client.report [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Deleted allocations for instance 5984ee2f-103a-4999-9229-34c4d21779f1 [ 958.074755] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c456cf-1212-454f-a4a0-b76693ca939e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.083933] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2efb49-da73-4cb8-8ec1-a92b35255ed8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.114333] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d59eb1-a6ac-4826-addf-49cec3fa1e94 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.121321] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1302f817-c0ae-4640-bd95-5b402ce9164a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.134078] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.524803] env[61648]: DEBUG oslo_concurrency.lockutils [None req-2b7435f7-ae75-40d4-8d95-a2f08302e5c9 tempest-AttachInterfacesTestJSON-335874717 tempest-AttachInterfacesTestJSON-335874717-project-member] Lock 
"5984ee2f-103a-4999-9229-34c4d21779f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.770s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.638588] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.141429] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 959.141719] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.268s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.141935] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.941s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.143479] env[61648]: INFO nova.compute.claims [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.288486] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd0ca8f-e454-4cb4-8962-eb8c01ef8d64 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.297193] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3ca3a0-b82f-4e58-a597-d243218f659a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.333537] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcc91be-8b4c-42f7-9768-a1c2bc0d438d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.342079] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4883ee2-ef89-4593-aa87-74f5b1fb62b6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.361054] env[61648]: DEBUG nova.compute.provider_tree [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 
tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.864460] env[61648]: DEBUG nova.scheduler.client.report [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 961.376882] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.232s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.376882] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 961.377665] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.928s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.379369] env[61648]: INFO nova.compute.claims [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.883870] env[61648]: DEBUG nova.compute.utils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.887580] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 961.887768] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.943454] env[61648]: DEBUG nova.policy [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75788746b2214f2e8c1a8884c89ddb9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd94e7e89f424d34920f0fa92acf3226', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 962.197102] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Successfully created port: 80b302ea-03ef-448c-9093-4e9eb97bbcd1 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 962.388332] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 962.479019] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9598c27e-04b3-4b1e-95fc-4ddc86f2ede8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.486742] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6993ec91-4a1e-4d9b-b0c6-e5bc27f6c5ea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.521756] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7088e5a5-02cf-4348-9fd0-b4c4463ec154 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.529170] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef677113-985a-44cb-b1c0-d1c45944e7e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.542323] env[61648]: DEBUG nova.compute.provider_tree [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.943837] env[61648]: DEBUG nova.compute.manager [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Received event network-changed-80b302ea-03ef-448c-9093-4e9eb97bbcd1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.944051] env[61648]: DEBUG nova.compute.manager [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Refreshing instance network info cache due to event network-changed-80b302ea-03ef-448c-9093-4e9eb97bbcd1. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 962.944268] env[61648]: DEBUG oslo_concurrency.lockutils [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] Acquiring lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.944408] env[61648]: DEBUG oslo_concurrency.lockutils [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] Acquired lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.944623] env[61648]: DEBUG nova.network.neutron [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Refreshing network info cache for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.045709] env[61648]: DEBUG nova.scheduler.client.report [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 963.091238] env[61648]: ERROR nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. 
[ 963.091238] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.091238] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 963.091238] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 963.091238] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.091238] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.091238] env[61648]: ERROR nova.compute.manager raise self.value [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 963.091238] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 963.091238] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.091238] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 963.092329] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 963.092329] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 963.092329] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. 
[ 963.092329] env[61648]: ERROR nova.compute.manager [ 963.092329] env[61648]: Traceback (most recent call last): [ 963.092329] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 963.092329] env[61648]: listener.cb(fileno) [ 963.092329] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 963.092329] env[61648]: result = function(*args, **kwargs) [ 963.092329] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 963.092329] env[61648]: return func(*args, **kwargs) [ 963.092329] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 963.092329] env[61648]: raise e [ 963.092329] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.092329] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 963.092329] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 963.092329] env[61648]: created_port_ids = self._update_ports_for_instance( [ 963.092329] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 963.092329] env[61648]: with excutils.save_and_reraise_exception(): [ 963.092329] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.092329] env[61648]: self.force_reraise() [ 963.092329] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.092329] env[61648]: raise self.value [ 963.092329] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 963.092329] env[61648]: updated_port = self._update_port( [ 963.092329] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.092329] env[61648]: _ensure_no_port_binding_failure(port) [ 963.092329] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 963.092329] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 963.093723] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. [ 963.093723] env[61648]: Removing descriptor: 19 [ 963.398742] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 963.423731] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=<?>,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-22T15:33:55Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 963.423959] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 963.424128] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.424304] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 963.424448] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.424639] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 963.424871] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 963.425095] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 963.425222] env[61648]: DEBUG 
nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 963.425384] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 963.425583] env[61648]: DEBUG nova.virt.hardware [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 963.426530] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64abf07-b519-4755-b732-d57e620063b1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.433987] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cde2aaa-2bbc-4f96-bfb1-e6831ebf203d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.449102] env[61648]: ERROR nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. 
[ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Traceback (most recent call last): [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] yield resources [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.driver.spawn(context, instance, image_meta, [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] vm_ref = self.build_virtual_machine(instance, [ 963.449102] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] for vif in network_info: [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return self._sync_wrapper(fn, *args, **kwargs) [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.wait() [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self[:] = self._gt.wait() [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return self._exit_event.wait() [ 963.449560] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 963.449560] env[61648]: ERROR 
nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] current.throw(*self._exc) [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] result = function(*args, **kwargs) [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return func(*args, **kwargs) [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise e [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] nwinfo = self.network_api.allocate_for_instance( [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] created_port_ids = self._update_ports_for_instance( [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] with excutils.save_and_reraise_exception(): [ 963.450029] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.force_reraise() [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise self.value [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] updated_port = self._update_port( [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] _ensure_no_port_binding_failure(port) [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise exception.PortBindingFailed(port_id=port['id']) [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. [ 963.450478] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] [ 963.450478] env[61648]: INFO nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Terminating instance [ 963.451338] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.464037] env[61648]: DEBUG nova.network.neutron [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.542660] env[61648]: DEBUG nova.network.neutron [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.550717] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.551244] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 963.553968] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.320s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.048812] env[61648]: DEBUG oslo_concurrency.lockutils [req-5d07b8f6-7785-4426-823f-acadfdf26c0a req-5e5a0f10-75e1-44d1-a348-cb962cd7e78a service nova] Releasing lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.049178] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.049362] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.059884] env[61648]: DEBUG nova.compute.utils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 964.061141] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 964.061309] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.071672] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 964.137685] env[61648]: DEBUG nova.policy [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 964.148549] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d581d1-7153-4564-be33-895804f8ec03 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.157154] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0840dcf5-2adc-44b7-ae2a-3170dafbd6a3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.185739] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e79314a-ad3d-4ce3-a05b-0e248e6947d5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.192454] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93100a61-4a71-4983-9d98-be83945c8a16 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.205012] env[61648]: DEBUG nova.compute.provider_tree [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.377016] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Successfully created port: c92a3355-d861-4b1a-81c1-d956c1cfc2b9 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.572605] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 964.621035] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 964.709376] env[61648]: DEBUG nova.scheduler.client.report [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.716230] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.972613] env[61648]: DEBUG nova.compute.manager [req-01d35eab-56aa-4245-91a6-07d9527438f1 req-660684c6-0117-4d15-b949-3ca44b98baae service nova] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Received event network-vif-deleted-80b302ea-03ef-448c-9093-4e9eb97bbcd1 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.081609] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 965.112659] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.112905] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.113148] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.113356] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.113504] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.113649] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.113859] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.114122] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.114302] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 
tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.114465] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.114701] env[61648]: DEBUG nova.virt.hardware [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.115554] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f64b04d-9402-490f-88b4-7ee91e40cdee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.128767] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9863df0-2a61-4db1-bc5e-7209e1089027 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.214231] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.660s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.215085] env[61648]: ERROR nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. 
[ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Traceback (most recent call last): [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.driver.spawn(context, instance, image_meta, [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] vm_ref = self.build_virtual_machine(instance, [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] vif_infos = vmwarevif.get_vif_info(self._session, [ 965.215085] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] for vif in network_info: [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self._sync_wrapper(fn, *args, **kwargs) [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.wait() [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self[:] = self._gt.wait() [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self._exit_event.wait() [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] result = hub.switch() [ 965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
965.215808] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return self.greenlet.switch() [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] result = function(*args, **kwargs) [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] return func(*args, **kwargs) [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise e [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] nwinfo = self.network_api.allocate_for_instance( [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] created_port_ids = self._update_ports_for_instance( [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] with excutils.save_and_reraise_exception(): [ 965.216418] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] self.force_reraise() [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise self.value [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] updated_port = self._update_port( [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] _ensure_no_port_binding_failure(port) [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] raise exception.PortBindingFailed(port_id=port['id']) [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] nova.exception.PortBindingFailed: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. [ 965.217037] env[61648]: ERROR nova.compute.manager [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] [ 965.217547] env[61648]: DEBUG nova.compute.utils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 965.218046] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Build of instance 62c944e1-7d39-4ef7-9994-0436008e59f6 was re-scheduled: Binding failed for port 3a823114-e9e9-4617-8318-06a3b9fb425e, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 965.218476] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 965.218705] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.218855] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.219015] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.220396] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.458s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.224682] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 
tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.225066] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.229177] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.229965] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2aa916ab-3b53-4b34-83e3-75556c43f4f5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.244496] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c04b7f-5252-4066-8e74-fca68af75639 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.268206] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 00170bcb-99de-4be3-aa30-10c3a381d2ae could not be found. [ 965.268442] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.268621] env[61648]: INFO nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Took 0.04 seconds to destroy the instance on the hypervisor. [ 965.268867] env[61648]: DEBUG oslo.service.loopingcall [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.269160] env[61648]: DEBUG nova.compute.manager [-] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.269160] env[61648]: DEBUG nova.network.neutron [-] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.290483] env[61648]: DEBUG nova.network.neutron [-] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.463779] env[61648]: ERROR nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. [ 965.463779] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.463779] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 965.463779] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 965.463779] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.463779] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.463779] env[61648]: ERROR nova.compute.manager raise self.value [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 965.463779] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 965.463779] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.463779] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 965.464905] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 965.464905] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 965.464905] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. 
[ 965.464905] env[61648]: ERROR nova.compute.manager [ 965.464905] env[61648]: Traceback (most recent call last): [ 965.464905] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 965.464905] env[61648]: listener.cb(fileno) [ 965.464905] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 965.464905] env[61648]: result = function(*args, **kwargs) [ 965.464905] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 965.464905] env[61648]: return func(*args, **kwargs) [ 965.464905] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 965.464905] env[61648]: raise e [ 965.464905] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.464905] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 965.464905] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 965.464905] env[61648]: created_port_ids = self._update_ports_for_instance( [ 965.464905] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 965.464905] env[61648]: with excutils.save_and_reraise_exception(): [ 965.464905] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.464905] env[61648]: self.force_reraise() [ 965.464905] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.464905] env[61648]: raise self.value [ 965.464905] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 965.464905] env[61648]: updated_port = self._update_port( [ 965.464905] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.464905] env[61648]: _ensure_no_port_binding_failure(port) [ 965.464905] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 965.464905] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 965.466259] env[61648]: nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. [ 965.466259] env[61648]: Removing descriptor: 19 [ 965.466259] env[61648]: ERROR nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. 
[ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Traceback (most recent call last): [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] yield resources [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.driver.spawn(context, instance, image_meta, [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 965.466259] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] vm_ref = self.build_virtual_machine(instance, [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] vif_infos = vmwarevif.get_vif_info(self._session, [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] for vif in network_info: [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self._sync_wrapper(fn, *args, **kwargs) [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.wait() [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self[:] = self._gt.wait() [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self._exit_event.wait() [ 965.466811] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 965.468195] env[61648]: ERROR 
nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] result = hub.switch() [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self.greenlet.switch() [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] result = function(*args, **kwargs) [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return func(*args, **kwargs) [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise e [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] nwinfo = self.network_api.allocate_for_instance( [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 965.468195] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] created_port_ids = self._update_ports_for_instance( [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] with excutils.save_and_reraise_exception(): [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.force_reraise() [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise self.value [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] updated_port = self._update_port( [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 965.469064] 
env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] _ensure_no_port_binding_failure(port) [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 965.469064] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise exception.PortBindingFailed(port_id=port['id']) [ 965.469640] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. [ 965.469640] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] [ 965.469640] env[61648]: INFO nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Terminating instance [ 965.469640] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.469640] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.469640] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.736582] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.790736] env[61648]: DEBUG nova.network.neutron [-] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.815075] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95444b5-ba12-4cb1-8426-d66f346816b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.822902] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43855ba-c063-4ab1-aceb-6ab51510500c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.826509] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.863864] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea059db5-defd-49bf-b4e9-3a06b1b56233 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.871601] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c54f0e0-a1b8-4899-9016-e67514c6d2ec {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.885750] env[61648]: DEBUG nova.compute.provider_tree [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.988810] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.068375] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.294464] env[61648]: INFO nova.compute.manager [-] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Took 1.03 seconds to deallocate network for instance. 
[ 966.296947] env[61648]: DEBUG nova.compute.claims [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 966.297142] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.328779] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-62c944e1-7d39-4ef7-9994-0436008e59f6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.328985] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 966.329196] env[61648]: DEBUG nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.329361] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.345516] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.389176] env[61648]: DEBUG nova.scheduler.client.report [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.571569] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.571961] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 966.572229] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.572550] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-955856e0-e95a-4037-9427-a538b54ce371 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.581356] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c1254d-5347-44d3-9c98-f0f5ee698f77 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.601347] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1fe8fefb-6625-4582-bc35-a3c91f88db0e could not be found. [ 966.601580] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.601800] env[61648]: INFO nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 966.602076] env[61648]: DEBUG oslo.service.loopingcall [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.602341] env[61648]: DEBUG nova.compute.manager [-] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.602528] env[61648]: DEBUG nova.network.neutron [-] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.618470] env[61648]: DEBUG nova.network.neutron [-] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.848020] env[61648]: DEBUG nova.network.neutron [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.893981] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.673s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.894689] env[61648]: ERROR nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. 
[ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Traceback (most recent call last): [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.driver.spawn(context, instance, image_meta, [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] vm_ref = self.build_virtual_machine(instance, [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] vif_infos = vmwarevif.get_vif_info(self._session, [ 966.894689] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] for vif in network_info: [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return self._sync_wrapper(fn, *args, **kwargs) [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.wait() [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self[:] = self._gt.wait() [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return self._exit_event.wait() [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] current.throw(*self._exc) [ 966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
966.895070] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] result = function(*args, **kwargs) [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] return func(*args, **kwargs) [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise e [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] nwinfo = self.network_api.allocate_for_instance( [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] created_port_ids = self._update_ports_for_instance( [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] with excutils.save_and_reraise_exception(): [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] self.force_reraise() [ 966.895650] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise self.value [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] updated_port = self._update_port( [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] _ensure_no_port_binding_failure(port) [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] raise exception.PortBindingFailed(port_id=port['id']) [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] nova.exception.PortBindingFailed: Binding failed for 
port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. [ 966.896053] env[61648]: ERROR nova.compute.manager [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] [ 966.896053] env[61648]: DEBUG nova.compute.utils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 966.896953] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.599s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.902705] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Build of instance f4d4f44d-a53a-44e1-ad72-87a0694b395c was re-scheduled: Binding failed for port efeeccd0-4d60-468f-9d01-ae6e44b334a3, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 966.903219] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 966.903486] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquiring lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.903670] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Acquired lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.903865] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.997262] env[61648]: DEBUG nova.compute.manager [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Received event network-changed-c92a3355-d861-4b1a-81c1-d956c1cfc2b9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 966.997373] env[61648]: DEBUG nova.compute.manager [req-021fa112-49ce-4917-aff8-5a61a0972353 
req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Refreshing instance network info cache due to event network-changed-c92a3355-d861-4b1a-81c1-d956c1cfc2b9. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 966.997600] env[61648]: DEBUG oslo_concurrency.lockutils [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] Acquiring lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.997710] env[61648]: DEBUG oslo_concurrency.lockutils [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] Acquired lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.997852] env[61648]: DEBUG nova.network.neutron [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Refreshing network info cache for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 967.120851] env[61648]: DEBUG nova.network.neutron [-] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.350835] env[61648]: INFO nova.compute.manager [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 62c944e1-7d39-4ef7-9994-0436008e59f6] Took 1.02 seconds to deallocate network for instance. [ 967.425751] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.467799] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee841ee0-ee78-4162-bb5b-ce33221f84c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.476106] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05382fa1-708d-4eae-b945-67a5c70fde4e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.505938] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edca6a0f-c78b-4d6d-a4fb-8dd8f73c72a1 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.509057] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.515376] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2745462a-35d3-4a6b-b9eb-b0f1f26a030e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.528766] env[61648]: DEBUG nova.compute.provider_tree [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.530428] env[61648]: DEBUG nova.network.neutron [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.599217] env[61648]: DEBUG nova.network.neutron [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.623256] env[61648]: INFO nova.compute.manager [-] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Took 1.02 seconds to deallocate network for instance. 
[ 967.625395] env[61648]: DEBUG nova.compute.claims [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 967.625567] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.012389] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Releasing lock "refresh_cache-f4d4f44d-a53a-44e1-ad72-87a0694b395c" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.012695] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 968.012934] env[61648]: DEBUG nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 968.013131] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.033632] env[61648]: DEBUG nova.scheduler.client.report [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.039434] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.102052] env[61648]: DEBUG oslo_concurrency.lockutils [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] Releasing lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.102379] env[61648]: DEBUG nova.compute.manager [req-021fa112-49ce-4917-aff8-5a61a0972353 req-d43811b4-dc16-44c7-ac1d-fca1061fcd88 service nova] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Received event network-vif-deleted-c92a3355-d861-4b1a-81c1-d956c1cfc2b9 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.387820] env[61648]: INFO nova.scheduler.client.report [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance 62c944e1-7d39-4ef7-9994-0436008e59f6 [ 968.538146] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.641s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.538827] env[61648]: ERROR nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. 
[ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Traceback (most recent call last): [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.driver.spawn(context, instance, image_meta, [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] vm_ref = self.build_virtual_machine(instance, [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 968.538827] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] for vif in network_info: [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return self._sync_wrapper(fn, *args, **kwargs) [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.wait() [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self[:] = self._gt.wait() [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return self._exit_event.wait() [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] current.throw(*self._exc) [ 968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
968.539251] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] result = function(*args, **kwargs) [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] return func(*args, **kwargs) [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise e [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] nwinfo = self.network_api.allocate_for_instance( [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] created_port_ids = self._update_ports_for_instance( [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] with excutils.save_and_reraise_exception(): [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] self.force_reraise() [ 968.539636] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise self.value [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] updated_port = self._update_port( [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] _ensure_no_port_binding_failure(port) [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] raise exception.PortBindingFailed(port_id=port['id']) [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] nova.exception.PortBindingFailed: Binding failed for 
port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. [ 968.539999] env[61648]: ERROR nova.compute.manager [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] [ 968.539999] env[61648]: DEBUG nova.compute.utils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 968.541145] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.916s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.543819] env[61648]: DEBUG nova.network.neutron [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.545096] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Build of instance 00170bcb-99de-4be3-aa30-10c3a381d2ae was re-scheduled: Binding failed for port 80b302ea-03ef-448c-9093-4e9eb97bbcd1, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 968.545542] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 968.545801] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquiring lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.545958] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Acquired lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.546127] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.895207] env[61648]: DEBUG oslo_concurrency.lockutils [None req-556d7c23-d62d-4c04-8b3d-96eabd1657a6 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "62c944e1-7d39-4ef7-9994-0436008e59f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.516s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.055773] env[61648]: INFO nova.compute.manager [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] [instance: f4d4f44d-a53a-44e1-ad72-87a0694b395c] Took 1.04 seconds to deallocate network for instance. [ 969.079204] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.115342] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52896f9c-919c-46c8-bf20-75273fb6e134 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.129107] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260a57d1-1541-4c67-bd8e-0b9ff1c7bf33 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.158313] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7844ae64-8b21-4bad-b258-bed3cec86016 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.169015] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888fdc15-c708-4507-a721-1e5222170212 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.178294] env[61648]: DEBUG nova.compute.provider_tree [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.180667] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.683156] env[61648]: DEBUG nova.scheduler.client.report [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 969.687788] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Releasing lock "refresh_cache-00170bcb-99de-4be3-aa30-10c3a381d2ae" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.688093] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 969.688340] env[61648]: DEBUG nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 969.688569] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.707749] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.087151] env[61648]: INFO nova.scheduler.client.report [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Deleted allocations for instance f4d4f44d-a53a-44e1-ad72-87a0694b395c [ 970.195024] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.651s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.195024] env[61648]: ERROR nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. 
[ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Traceback (most recent call last): [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.driver.spawn(context, instance, image_meta, [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 970.195024] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] vm_ref = self.build_virtual_machine(instance, [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] vif_infos = vmwarevif.get_vif_info(self._session, [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] for vif in network_info: [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self._sync_wrapper(fn, *args, **kwargs) [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.wait() [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self[:] = self._gt.wait() [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self._exit_event.wait() [ 970.195335] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] result = hub.switch() [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return self.greenlet.switch() [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] result = function(*args, **kwargs) [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] return func(*args, **kwargs) [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise e [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] nwinfo = self.network_api.allocate_for_instance( [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 970.195737] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] created_port_ids = self._update_ports_for_instance( [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] with excutils.save_and_reraise_exception(): [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] self.force_reraise() [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise self.value [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] updated_port = self._update_port( [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] _ensure_no_port_binding_failure(port) [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 970.196143] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] raise exception.PortBindingFailed(port_id=port['id']) [ 970.196561] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] nova.exception.PortBindingFailed: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. [ 970.196561] env[61648]: ERROR nova.compute.manager [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] [ 970.196561] env[61648]: DEBUG nova.compute.utils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 970.197999] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Build of instance 1fe8fefb-6625-4582-bc35-a3c91f88db0e was re-scheduled: Binding failed for port c92a3355-d861-4b1a-81c1-d956c1cfc2b9, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 970.198676] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 970.200340] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.200340] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.200340] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.211281] env[61648]: DEBUG nova.network.neutron [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.240023] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock 
"99f8bd98-5bc2-424a-9025-cef7d89b5939" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.240023] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "99f8bd98-5bc2-424a-9025-cef7d89b5939" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.599679] env[61648]: DEBUG oslo_concurrency.lockutils [None req-ed764055-63dc-4910-807a-6d0807ce24e8 tempest-ServersTestManualDisk-1735499096 tempest-ServersTestManualDisk-1735499096-project-member] Lock "f4d4f44d-a53a-44e1-ad72-87a0694b395c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.871s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.715832] env[61648]: INFO nova.compute.manager [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] [instance: 00170bcb-99de-4be3-aa30-10c3a381d2ae] Took 1.02 seconds to deallocate network for instance. [ 970.719131] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.747892] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 970.804448] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.277227] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.277227] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.277227] env[61648]: INFO nova.compute.claims [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.308025] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-1fe8fefb-6625-4582-bc35-a3c91f88db0e" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.308025] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 971.308025] env[61648]: DEBUG nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 971.308025] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.331369] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.754096] env[61648]: INFO nova.scheduler.client.report [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Deleted allocations for instance 00170bcb-99de-4be3-aa30-10c3a381d2ae [ 971.833528] env[61648]: DEBUG nova.network.neutron [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.260675] env[61648]: DEBUG oslo_concurrency.lockutils [None req-1112052d-f4d8-4489-8e50-cccc4f4d9b36 tempest-ServerDiskConfigTestJSON-417367025 tempest-ServerDiskConfigTestJSON-417367025-project-member] Lock "00170bcb-99de-4be3-aa30-10c3a381d2ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.088s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.336705] env[61648]: INFO nova.compute.manager [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 1fe8fefb-6625-4582-bc35-a3c91f88db0e] Took 1.03 seconds to deallocate network for instance. [ 972.342296] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de246e21-ab78-444b-a4d3-aa328ee9138b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.353779] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64d6a6-4fe3-436d-8698-29f34bab8202 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.388230] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036bf847-0879-40a8-bc47-0314bdeac24a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.398173] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03986cf7-898f-4c4e-bf0f-f0b9c25803c8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.412373] env[61648]: DEBUG nova.compute.provider_tree [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.916742] env[61648]: DEBUG nova.scheduler.client.report [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 973.383200] env[61648]: INFO nova.scheduler.client.report [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance 1fe8fefb-6625-4582-bc35-a3c91f88db0e [ 973.422718] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.423263] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 973.895215] env[61648]: DEBUG oslo_concurrency.lockutils [None req-c475bf7e-4452-4e0e-934b-4e6c6024cf9d tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "1fe8fefb-6625-4582-bc35-a3c91f88db0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.474s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.932516] env[61648]: DEBUG nova.compute.utils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 973.934857] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 973.934857] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 973.997186] env[61648]: DEBUG nova.policy [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8658758cec10421ea417eb40a1a88ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61bcab083e6b4e1da5a11cfc573e1e2f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 974.329960] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Successfully created port: ec377319-541a-428d-8161-397e2c6e64b0 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 974.443296] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 975.344019] env[61648]: DEBUG nova.compute.manager [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Received event network-changed-ec377319-541a-428d-8161-397e2c6e64b0 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.344019] env[61648]: DEBUG nova.compute.manager [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Refreshing instance network info cache due to event network-changed-ec377319-541a-428d-8161-397e2c6e64b0. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.346075] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] Acquiring lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.346331] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] Acquired lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.346516] env[61648]: DEBUG nova.network.neutron [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Refreshing network info cache for port ec377319-541a-428d-8161-397e2c6e64b0 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.458448] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 975.490498] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 975.490498] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 975.490669] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 975.490837] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 975.491010] env[61648]: DEBUG nova.virt.hardware [None 
req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 975.493324] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 975.493574] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 975.493743] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 975.493916] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 975.494093] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 975.494273] env[61648]: DEBUG nova.virt.hardware [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 975.495521] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b511f950-4e31-4dcc-9d16-a3b8d978e108 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.504406] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccb024a-d921-4040-b9e8-4a0e5a9c6da3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.521806] env[61648]: ERROR nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. 
[ 975.521806] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 975.521806] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 975.521806] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 975.521806] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 975.521806] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 975.521806] env[61648]: ERROR nova.compute.manager raise self.value [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 975.521806] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 975.521806] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 975.521806] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 975.522488] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 975.522488] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 975.522488] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. 
[ 975.522488] env[61648]: ERROR nova.compute.manager [ 975.522488] env[61648]: Traceback (most recent call last): [ 975.522488] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 975.522488] env[61648]: listener.cb(fileno) [ 975.522488] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 975.522488] env[61648]: result = function(*args, **kwargs) [ 975.522488] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 975.522488] env[61648]: return func(*args, **kwargs) [ 975.522488] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 975.522488] env[61648]: raise e [ 975.522488] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 975.522488] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 975.522488] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 975.522488] env[61648]: created_port_ids = self._update_ports_for_instance( [ 975.522488] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 975.522488] env[61648]: with excutils.save_and_reraise_exception(): [ 975.522488] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 975.522488] env[61648]: self.force_reraise() [ 975.522488] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 975.522488] env[61648]: raise self.value [ 975.522488] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 975.522488] env[61648]: updated_port = self._update_port( [ 975.522488] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 975.522488] env[61648]: _ensure_no_port_binding_failure(port) [ 975.522488] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 975.522488] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 975.523528] env[61648]: nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. [ 975.523528] env[61648]: Removing descriptor: 14 [ 975.523528] env[61648]: ERROR nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. 
[ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Traceback (most recent call last): [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] yield resources [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.driver.spawn(context, instance, image_meta, [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self._vmops.spawn(context, instance, image_meta, injected_files, [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 975.523528] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] vm_ref = self.build_virtual_machine(instance, [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] vif_infos = vmwarevif.get_vif_info(self._session, [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] for vif in network_info: [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self._sync_wrapper(fn, *args, **kwargs) [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.wait() [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self[:] = self._gt.wait() [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self._exit_event.wait() [ 975.523938] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 975.524396] env[61648]: ERROR 
nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] result = hub.switch() [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self.greenlet.switch() [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] result = function(*args, **kwargs) [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return func(*args, **kwargs) [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise e [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] nwinfo = self.network_api.allocate_for_instance( [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 975.524396] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] created_port_ids = self._update_ports_for_instance( [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] with excutils.save_and_reraise_exception(): [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.force_reraise() [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise self.value [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] updated_port = self._update_port( [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 975.524844] 
env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] _ensure_no_port_binding_failure(port) [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 975.524844] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise exception.PortBindingFailed(port_id=port['id']) [ 975.525358] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. [ 975.525358] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] [ 975.525358] env[61648]: INFO nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Terminating instance [ 975.525358] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.794143] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.794363] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.867235] env[61648]: DEBUG nova.network.neutron [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.871526] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquiring lock "be8b87c8-5641-46e2-b191-cddd7019934b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.872898] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "be8b87c8-5641-46e2-b191-cddd7019934b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.960254] env[61648]: DEBUG nova.network.neutron [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.297109] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 976.375394] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 976.467209] env[61648]: DEBUG oslo_concurrency.lockutils [req-b8884f32-8997-455b-964b-f8abe911b3e9 req-db43f6f4-9629-43cc-a1fa-79fe42f8ebae service nova] Releasing lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.467209] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.467209] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.821147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.821147] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.821809] env[61648]: INFO nova.compute.claims [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.896784] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.984359] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.057846] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.373075] env[61648]: DEBUG nova.compute.manager [req-25043805-38b5-4518-8654-7fdaf84c5c90 req-6d1d1d9b-346d-4cde-ba3e-d9362d0b8b70 service nova] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Received event network-vif-deleted-ec377319-541a-428d-8161-397e2c6e64b0 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.561729] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.562188] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 977.562382] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.562678] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e540a60a-42c2-4488-bc34-7a15f8899eae {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.572483] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0d1aad-bb56-4d11-93c3-4081fe54ce85 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.593289] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 99f8bd98-5bc2-424a-9025-cef7d89b5939 could not be found. [ 977.593492] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.593667] env[61648]: INFO nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Took 0.03 seconds to destroy the instance on the hypervisor. 
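The traceback above records the complete failure path for instance 99f8bd98-5bc2-424a-9025-cef7d89b5939: allocate_for_instance() walks through _update_ports_for_instance() and _update_port() into _ensure_no_port_binding_failure(), which raises PortBindingFailed for port ec377319-541a-428d-8161-397e2c6e64b0; the compute manager then aborts the spawn, terminates the never-created VM (hence the later InstanceNotFound warning from vmops) and deallocates the network. The sketch below is illustrative only and is not the Nova source: the exception message is copied from the log entries, while the 'binding_failed' vif_type constant is an assumption about the value Neutron reports for a port it could not bind.

    # Illustrative sketch of the check named in the traceback
    # (nova/network/neutron.py, _ensure_no_port_binding_failure); not the
    # actual Nova code.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron value for a failed binding


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            # Message format copied verbatim from the log entries above.
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def ensure_no_port_binding_failure(port):
        """Abort the build if Neutron reported a failed binding for the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Example: a port dict shaped like the one Neutron would return for
    # ec377319-541a-428d-8161-397e2c6e64b0 produces the same error text.
    try:
        ensure_no_port_binding_failure(
            {'id': 'ec377319-541a-428d-8161-397e2c6e64b0',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)

The same check explains the second occurrence later in this section, where port 84f5098d-1d14-44ef-857e-b5f252475fc2 fails to bind for instance 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d and the build is aborted in the same way.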
[ 977.593899] env[61648]: DEBUG oslo.service.loopingcall [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.594123] env[61648]: DEBUG nova.compute.manager [-] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 977.594219] env[61648]: DEBUG nova.network.neutron [-] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.609794] env[61648]: DEBUG nova.network.neutron [-] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.879163] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54064982-2e9f-4f04-9f7c-e16592d826bf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.886372] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4dd05a4-9ad0-4a3b-8da4-5e3bdc961f4c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.917443] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df811920-74ba-4737-996a-f219a05e51e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.924378] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa40dc4b-24d7-4d0a-b2bf-812dece5e35c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.937243] env[61648]: DEBUG nova.compute.provider_tree [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.112365] env[61648]: DEBUG nova.network.neutron [-] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.447728] env[61648]: DEBUG nova.scheduler.client.report [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
978.614507] env[61648]: INFO nova.compute.manager [-] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Took 1.02 seconds to deallocate network for instance. [ 978.620966] env[61648]: DEBUG nova.compute.claims [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 978.621134] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.953244] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.953813] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 978.956645] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.060s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.958117] env[61648]: INFO nova.compute.claims [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.463177] env[61648]: DEBUG nova.compute.utils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.465576] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 979.465781] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.516346] env[61648]: DEBUG nova.policy [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05baa26c79e1430c9945bfa82bd802dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '413caef8b4b34ad49a8aa707ca007dbd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.799888] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Successfully created port: 84f5098d-1d14-44ef-857e-b5f252475fc2 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.970518] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 980.022131] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc66178b-6275-4ea0-b017-fe0552736eb5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.030087] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6e4e96-42f8-42ae-9a7d-2c21c876554a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.060564] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798eb2c3-0ba7-4452-bf4d-3a81593ef576 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.067874] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e0947c-d865-4b54-9ff9-6e740493991b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.081082] env[61648]: DEBUG nova.compute.provider_tree [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.572072] env[61648]: DEBUG nova.compute.manager [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Received event network-changed-84f5098d-1d14-44ef-857e-b5f252475fc2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.572300] env[61648]: DEBUG nova.compute.manager [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Refreshing instance network info cache due to event network-changed-84f5098d-1d14-44ef-857e-b5f252475fc2. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 980.572519] env[61648]: DEBUG oslo_concurrency.lockutils [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] Acquiring lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.572659] env[61648]: DEBUG oslo_concurrency.lockutils [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] Acquired lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.572823] env[61648]: DEBUG nova.network.neutron [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Refreshing network info cache for port 84f5098d-1d14-44ef-857e-b5f252475fc2 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.587057] env[61648]: DEBUG nova.scheduler.client.report [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.804815] env[61648]: ERROR nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. 
[ 980.804815] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.804815] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 980.804815] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 980.804815] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.804815] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.804815] env[61648]: ERROR nova.compute.manager raise self.value [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 980.804815] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 980.804815] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.804815] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 980.805670] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 980.805670] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 980.805670] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. 
[ 980.805670] env[61648]: ERROR nova.compute.manager [ 980.805670] env[61648]: Traceback (most recent call last): [ 980.805670] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 980.805670] env[61648]: listener.cb(fileno) [ 980.805670] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 980.805670] env[61648]: result = function(*args, **kwargs) [ 980.805670] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 980.805670] env[61648]: return func(*args, **kwargs) [ 980.805670] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 980.805670] env[61648]: raise e [ 980.805670] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 980.805670] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 980.805670] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 980.805670] env[61648]: created_port_ids = self._update_ports_for_instance( [ 980.805670] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 980.805670] env[61648]: with excutils.save_and_reraise_exception(): [ 980.805670] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 980.805670] env[61648]: self.force_reraise() [ 980.805670] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 980.805670] env[61648]: raise self.value [ 980.805670] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 980.805670] env[61648]: updated_port = self._update_port( [ 980.805670] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 980.805670] env[61648]: _ensure_no_port_binding_failure(port) [ 980.805670] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 980.805670] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 980.806598] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. [ 980.806598] env[61648]: Removing descriptor: 14 [ 980.983103] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 981.008234] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.008500] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.008656] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.008834] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.008979] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.009144] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.009481] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.009745] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.009934] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 
tempest-ServersTestJSON-2136554250-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.010124] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.010306] env[61648]: DEBUG nova.virt.hardware [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.011174] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03bf812-6300-43e5-9e7e-6ca44c872a2f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.019259] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b3b583-ad49-4127-a361-c5ae009672bd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.032593] env[61648]: ERROR nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. 
[ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Traceback (most recent call last): [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] yield resources [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.driver.spawn(context, instance, image_meta, [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] vm_ref = self.build_virtual_machine(instance, [ 981.032593] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] vif_infos = vmwarevif.get_vif_info(self._session, [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] for vif in network_info: [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return self._sync_wrapper(fn, *args, **kwargs) [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.wait() [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self[:] = self._gt.wait() [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return self._exit_event.wait() [ 981.032977] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 981.032977] env[61648]: ERROR 
nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] current.throw(*self._exc) [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] result = function(*args, **kwargs) [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return func(*args, **kwargs) [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise e [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] nwinfo = self.network_api.allocate_for_instance( [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] created_port_ids = self._update_ports_for_instance( [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] with excutils.save_and_reraise_exception(): [ 981.033496] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.force_reraise() [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise self.value [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] updated_port = self._update_port( [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] _ensure_no_port_binding_failure(port) [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise exception.PortBindingFailed(port_id=port['id']) [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. [ 981.033869] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] [ 981.033869] env[61648]: INFO nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Terminating instance [ 981.035254] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.090148] env[61648]: DEBUG nova.network.neutron [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.092260] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.092727] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Start building networks asynchronously for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 981.095547] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.474s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.211137] env[61648]: DEBUG nova.network.neutron [req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.607574] env[61648]: DEBUG nova.compute.utils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 981.609176] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 981.609395] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 981.658352] env[61648]: DEBUG nova.policy [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '391c1fe6220343bfbcd1e3782f8b5ab6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a46bbe8bd7e44729829bca761f4013c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 981.666505] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a232dc-f338-46f3-9c16-6def30b0b2ce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.674305] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14bd6d6-71b1-40ae-9e19-4bd458dcb79d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.711669] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dbc1b4-7374-4292-9204-0dbbbeaf4f3e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.714530] env[61648]: DEBUG oslo_concurrency.lockutils 
[req-7511a3ad-58e5-413e-835c-c154ef632871 req-ca2727e2-61bf-4224-a050-a929d215046b service nova] Releasing lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.714892] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.715080] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.722483] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd141cb3-685f-4f04-9d93-f1dfb1f9f4f8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.736874] env[61648]: DEBUG nova.compute.provider_tree [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.934249] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Successfully created port: 39f95e58-d3e0-48ec-b9fe-dffe5288f556 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.113625] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.239170] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.241546] env[61648]: DEBUG nova.scheduler.client.report [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.349246] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.598640] env[61648]: DEBUG nova.compute.manager [req-43c39d84-af84-4016-8372-6ffcb3bf3bf3 req-fe8aedce-7be7-4872-b8e5-1a737ecd36e1 service nova] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Received event network-vif-deleted-84f5098d-1d14-44ef-857e-b5f252475fc2 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 982.747557] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.652s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.748211] env[61648]: ERROR nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. 
[ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Traceback (most recent call last): [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.driver.spawn(context, instance, image_meta, [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self._vmops.spawn(context, instance, image_meta, injected_files, [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] vm_ref = self.build_virtual_machine(instance, [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] vif_infos = vmwarevif.get_vif_info(self._session, [ 982.748211] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] for vif in network_info: [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self._sync_wrapper(fn, *args, **kwargs) [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.wait() [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self[:] = self._gt.wait() [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self._exit_event.wait() [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] result = hub.switch() [ 982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
982.748622] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return self.greenlet.switch() [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] result = function(*args, **kwargs) [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] return func(*args, **kwargs) [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise e [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] nwinfo = self.network_api.allocate_for_instance( [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] created_port_ids = self._update_ports_for_instance( [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] with excutils.save_and_reraise_exception(): [ 982.749067] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] self.force_reraise() [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise self.value [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] updated_port = self._update_port( [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] _ensure_no_port_binding_failure(port) [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] raise exception.PortBindingFailed(port_id=port['id']) [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] nova.exception.PortBindingFailed: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. [ 982.749487] env[61648]: ERROR nova.compute.manager [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] [ 982.749850] env[61648]: DEBUG nova.compute.utils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 982.750705] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Build of instance 99f8bd98-5bc2-424a-9025-cef7d89b5939 was re-scheduled: Binding failed for port ec377319-541a-428d-8161-397e2c6e64b0, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 982.751123] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 982.751349] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquiring lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.751495] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Acquired lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.751650] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.813242] env[61648]: ERROR nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. 
[ 982.813242] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.813242] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 982.813242] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 982.813242] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.813242] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.813242] env[61648]: ERROR nova.compute.manager raise self.value [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 982.813242] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 982.813242] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.813242] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 982.813843] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 982.813843] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 982.813843] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. 
[ 982.813843] env[61648]: ERROR nova.compute.manager [ 982.813843] env[61648]: Traceback (most recent call last): [ 982.813843] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 982.813843] env[61648]: listener.cb(fileno) [ 982.813843] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 982.813843] env[61648]: result = function(*args, **kwargs) [ 982.813843] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 982.813843] env[61648]: return func(*args, **kwargs) [ 982.813843] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 982.813843] env[61648]: raise e [ 982.813843] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 982.813843] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 982.813843] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 982.813843] env[61648]: created_port_ids = self._update_ports_for_instance( [ 982.813843] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 982.813843] env[61648]: with excutils.save_and_reraise_exception(): [ 982.813843] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.813843] env[61648]: self.force_reraise() [ 982.813843] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.813843] env[61648]: raise self.value [ 982.813843] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 982.813843] env[61648]: updated_port = self._update_port( [ 982.813843] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 982.813843] env[61648]: _ensure_no_port_binding_failure(port) [ 982.813843] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 982.813843] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 982.814795] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. [ 982.814795] env[61648]: Removing descriptor: 14 [ 982.851705] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.852170] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 982.852439] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.852682] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44aa7687-d03b-45b1-a54d-c841581d3b13 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.868045] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efc1528-8898-432b-9f7d-21041592afad {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.888203] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d could not be found. [ 982.888321] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.888451] env[61648]: INFO nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 982.888659] env[61648]: DEBUG oslo.service.loopingcall [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.888910] env[61648]: DEBUG nova.compute.manager [-] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 982.889019] env[61648]: DEBUG nova.network.neutron [-] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.905645] env[61648]: DEBUG nova.network.neutron [-] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.122978] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 983.148773] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.149036] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.149198] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.149375] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.149541] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.149931] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.150239] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.150410] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.150579] env[61648]: DEBUG nova.virt.hardware [None 
req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.150739] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.150910] env[61648]: DEBUG nova.virt.hardware [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.151775] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc5410d-e4fc-4e1b-9867-8f966181acce {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.160082] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aab79bc-21ba-4049-820f-c12ee1731236 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.173647] env[61648]: ERROR nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. 
[ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Traceback (most recent call last): [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] yield resources [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.driver.spawn(context, instance, image_meta, [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] vm_ref = self.build_virtual_machine(instance, [ 983.173647] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] vif_infos = vmwarevif.get_vif_info(self._session, [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] for vif in network_info: [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return self._sync_wrapper(fn, *args, **kwargs) [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.wait() [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self[:] = self._gt.wait() [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return self._exit_event.wait() [ 983.174099] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 983.174099] env[61648]: ERROR 
nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] current.throw(*self._exc) [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] result = function(*args, **kwargs) [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return func(*args, **kwargs) [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise e [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] nwinfo = self.network_api.allocate_for_instance( [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] created_port_ids = self._update_ports_for_instance( [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] with excutils.save_and_reraise_exception(): [ 983.174539] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.force_reraise() [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise self.value [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] updated_port = self._update_port( [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] _ensure_no_port_binding_failure(port) [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise exception.PortBindingFailed(port_id=port['id']) [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. [ 983.174968] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] [ 983.174968] env[61648]: INFO nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Terminating instance [ 983.176054] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquiring lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.176216] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquired lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.176379] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.271614] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.338259] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.408728] env[61648]: DEBUG nova.network.neutron [-] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.694234] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.761895] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.840824] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Releasing lock "refresh_cache-99f8bd98-5bc2-424a-9025-cef7d89b5939" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.841101] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 983.841282] env[61648]: DEBUG nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 983.841449] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.856554] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.913704] env[61648]: INFO nova.compute.manager [-] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Took 1.02 seconds to deallocate network for instance. 
[ 983.917996] env[61648]: DEBUG nova.compute.claims [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 983.918385] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.918751] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.265425] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Releasing lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.265911] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 984.266120] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 984.266451] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5d91bf5-b670-4895-8c11-94892dae090e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.276033] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff147e4-a66e-483f-bcc3-dd49cee8aabd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.297403] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be8b87c8-5641-46e2-b191-cddd7019934b could not be found. 
[ 984.297619] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.297808] env[61648]: INFO nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 984.298052] env[61648]: DEBUG oslo.service.loopingcall [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.298270] env[61648]: DEBUG nova.compute.manager [-] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 984.298433] env[61648]: DEBUG nova.network.neutron [-] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.313913] env[61648]: DEBUG nova.network.neutron [-] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 984.359243] env[61648]: DEBUG nova.network.neutron [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.478149] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5854ce93-6ffc-4d26-acb6-d65e48f9e4e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.486104] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225d104b-6a0e-4e5d-a977-4fb6e2d83a39 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.515925] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b297affb-1dab-4bff-899d-2297e85f2975 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.523103] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e80fe3-8d15-4802-854b-ee819393fa3f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.535928] env[61648]: DEBUG nova.compute.provider_tree [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.625187] env[61648]: DEBUG nova.compute.manager [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Received event network-changed-39f95e58-d3e0-48ec-b9fe-dffe5288f556 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.625474] env[61648]: DEBUG nova.compute.manager [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Refreshing instance network info cache due to event network-changed-39f95e58-d3e0-48ec-b9fe-dffe5288f556. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 984.625656] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] Acquiring lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.625793] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] Acquired lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.625969] env[61648]: DEBUG nova.network.neutron [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Refreshing network info cache for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 984.816128] env[61648]: DEBUG nova.network.neutron [-] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.861605] env[61648]: INFO nova.compute.manager [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] [instance: 99f8bd98-5bc2-424a-9025-cef7d89b5939] Took 1.02 seconds to deallocate network for instance. [ 985.039509] env[61648]: DEBUG nova.scheduler.client.report [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 985.143920] env[61648]: DEBUG nova.network.neutron [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.217595] env[61648]: DEBUG nova.network.neutron [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.318305] env[61648]: INFO nova.compute.manager [-] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Took 1.02 seconds to deallocate network for instance. [ 985.320669] env[61648]: DEBUG nova.compute.claims [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 985.320846] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.544994] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.626s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.545687] env[61648]: ERROR nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. 
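The ERROR entry above closes the first failed build: nova.exception.PortBindingFailed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, and the traceback that follows shows the exception being raised in nova/network/neutron.py by _ensure_no_port_binding_failure(), called from _update_port() during allocate_for_instance(). As a readability aid only, the sketch below reconstructs that guard; the exception class and the 'binding:vif_type' == 'binding_failed' check are assumptions inferred from the trace and the Neutron port-binding extension, not a copy of Nova's code.

```python
# Illustrative sketch only -- not Nova's implementation. It mirrors the
# failure path visible in the traceback below: Neutron records the binding
# result on the port body instead of rejecting the update, so the caller
# has to inspect the returned port and turn a failed binding into an error.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port: dict) -> None:
    # Assumed check: the port-binding extension reports an unbindable port
    # with binding:vif_type == 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# Example port body as Neutron might return it when no mechanism driver
# could bind the port on the compute host.
port = {"id": "84f5098d-1d14-44ef-857e-b5f252475fc2",
        "binding:vif_type": "binding_failed"}

try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message shape as the ERROR entry above
```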
[ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Traceback (most recent call last): [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.driver.spawn(context, instance, image_meta, [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] vm_ref = self.build_virtual_machine(instance, [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] vif_infos = vmwarevif.get_vif_info(self._session, [ 985.545687] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] for vif in network_info: [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return self._sync_wrapper(fn, *args, **kwargs) [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.wait() [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self[:] = self._gt.wait() [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return self._exit_event.wait() [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] current.throw(*self._exc) [ 985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
985.546178] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] result = function(*args, **kwargs) [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] return func(*args, **kwargs) [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise e [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] nwinfo = self.network_api.allocate_for_instance( [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] created_port_ids = self._update_ports_for_instance( [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] with excutils.save_and_reraise_exception(): [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] self.force_reraise() [ 985.546644] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise self.value [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] updated_port = self._update_port( [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] _ensure_no_port_binding_failure(port) [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] raise exception.PortBindingFailed(port_id=port['id']) [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] nova.exception.PortBindingFailed: Binding failed for 
port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. [ 985.547088] env[61648]: ERROR nova.compute.manager [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] [ 985.547088] env[61648]: DEBUG nova.compute.utils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 985.547530] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.227s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.550627] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Build of instance 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d was re-scheduled: Binding failed for port 84f5098d-1d14-44ef-857e-b5f252475fc2, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 985.551064] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 985.551282] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquiring lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.551427] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Acquired lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.551581] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.720330] env[61648]: DEBUG oslo_concurrency.lockutils [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] Releasing lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.720539] env[61648]: DEBUG nova.compute.manager [req-e9bc500f-d204-4801-ac08-2f6a271154cf req-a89b3112-066d-4a26-bc50-c0410ca1f92b service nova] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Received event 
network-vif-deleted-39f95e58-d3e0-48ec-b9fe-dffe5288f556 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.888145] env[61648]: INFO nova.scheduler.client.report [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Deleted allocations for instance 99f8bd98-5bc2-424a-9025-cef7d89b5939 [ 986.077045] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.094328] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce49e331-93ca-4159-b095-60601c403c6f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.101983] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445dc412-32a6-482e-9b8b-4fc292268820 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.143365] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5730f857-e373-403d-b7ba-e471c72d51eb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.151176] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816b2e5f-7891-4c9d-ad61-6f1ab48db249 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.164057] env[61648]: DEBUG nova.compute.provider_tree [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.179529] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.396903] env[61648]: DEBUG oslo_concurrency.lockutils [None req-dd0f5958-36b3-4cbb-96b0-b98e3585d4c0 tempest-DeleteServersTestJSON-1773045320 tempest-DeleteServersTestJSON-1773045320-project-member] Lock "99f8bd98-5bc2-424a-9025-cef7d89b5939" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.157s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.667208] env[61648]: DEBUG nova.scheduler.client.report [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.681665] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Releasing lock "refresh_cache-4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.681873] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 986.682090] env[61648]: DEBUG nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 986.682225] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 986.698940] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.172193] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.624s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.172857] env[61648]: ERROR nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. 
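Around this point the log shows the usual oslo_concurrency.lockutils instrumentation for the resource tracker: Acquiring lock "compute_resources" by abort_instance_claim, then "acquired ... waited 0.227s", and later "released ... held 1.624s". A minimal sketch of that pattern, using only the public lockutils API; the function body is a stand-in for illustration, not the ResourceTracker implementation.

```python
# Sketch of the locking pattern behind the "Acquiring lock" / "acquired ...
# waited" / "released ... held" entries above: the resource-tracker method is
# wrapped so that all claim updates against "compute_resources" are
# serialized, and the wait/held timings in the log come from that wrapper.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def abort_instance_claim(instance_uuid: str) -> None:
    # Stand-in body: while the lock is held, no other thread can touch the
    # tracked inventory/usage for this compute node.
    print(f"dropping claim for {instance_uuid}")


abort_instance_claim("be8b87c8-5641-46e2-b191-cddd7019934b")
```

With debug logging enabled for oslo_concurrency, calling the decorated function should emit acquire/release lines of the same shape as the entries in this log.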
[ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Traceback (most recent call last): [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.driver.spawn(context, instance, image_meta, [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] vm_ref = self.build_virtual_machine(instance, [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] vif_infos = vmwarevif.get_vif_info(self._session, [ 987.172857] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] for vif in network_info: [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return self._sync_wrapper(fn, *args, **kwargs) [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.wait() [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self[:] = self._gt.wait() [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return self._exit_event.wait() [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] current.throw(*self._exc) [ 987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
987.173250] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] result = function(*args, **kwargs) [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] return func(*args, **kwargs) [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise e [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] nwinfo = self.network_api.allocate_for_instance( [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] created_port_ids = self._update_ports_for_instance( [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] with excutils.save_and_reraise_exception(): [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] self.force_reraise() [ 987.173616] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise self.value [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] updated_port = self._update_port( [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] _ensure_no_port_binding_failure(port) [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] raise exception.PortBindingFailed(port_id=port['id']) [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] nova.exception.PortBindingFailed: Binding failed for 
port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. [ 987.173976] env[61648]: ERROR nova.compute.manager [instance: be8b87c8-5641-46e2-b191-cddd7019934b] [ 987.173976] env[61648]: DEBUG nova.compute.utils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 987.175535] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Build of instance be8b87c8-5641-46e2-b191-cddd7019934b was re-scheduled: Binding failed for port 39f95e58-d3e0-48ec-b9fe-dffe5288f556, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 987.176013] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 987.176252] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquiring lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.176472] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Acquired lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.176623] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.201206] env[61648]: DEBUG nova.network.neutron [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.697611] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.704146] env[61648]: INFO nova.compute.manager [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] [instance: 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d] Took 1.02 seconds to deallocate network for instance. [ 987.803699] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.307893] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Releasing lock "refresh_cache-be8b87c8-5641-46e2-b191-cddd7019934b" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.308101] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 988.308285] env[61648]: DEBUG nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 988.308448] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 988.332665] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.736378] env[61648]: INFO nova.scheduler.client.report [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Deleted allocations for instance 4a5fa23b-60ac-46e1-b419-2c5b5236ce7d [ 988.835755] env[61648]: DEBUG nova.network.neutron [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.246959] env[61648]: DEBUG oslo_concurrency.lockutils [None req-6dc68197-4b49-468e-9b55-d9ca7cd8f256 tempest-ServersTestJSON-2136554250 tempest-ServersTestJSON-2136554250-project-member] Lock "4a5fa23b-60ac-46e1-b419-2c5b5236ce7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.452s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.341864] env[61648]: INFO nova.compute.manager [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] [instance: be8b87c8-5641-46e2-b191-cddd7019934b] Took 1.03 seconds to deallocate network for instance. [ 989.859083] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.391589] env[61648]: INFO nova.scheduler.client.report [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Deleted allocations for instance be8b87c8-5641-46e2-b191-cddd7019934b [ 990.854556] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.905859] env[61648]: DEBUG oslo_concurrency.lockutils [None req-375ff697-1c8f-4c53-b29e-887f6b08453c tempest-ServerAddressesTestJSON-721150574 tempest-ServerAddressesTestJSON-721150574-project-member] Lock "be8b87c8-5641-46e2-b191-cddd7019934b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.033s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.858618] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.858786] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 992.859537] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.860067] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.785939] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquiring lock "5fe38841-dd68-43d0-ab6a-e99137a28ec6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.786322] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Lock "5fe38841-dd68-43d0-ab6a-e99137a28ec6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.859153] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.859331] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 994.859446] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 995.288417] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Starting instance... {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 995.362626] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. 
{{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 995.362821] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 995.362991] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 995.812452] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.812739] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.814333] env[61648]: INFO nova.compute.claims [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.865747] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.850178] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af391a8-2595-465e-8aa6-c22c96f28d88 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.857880] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfe9e8b-b98c-48bc-b539-b95501b31f11 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.888142] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c143ebd3-9f43-4886-878d-b51efc42fdf3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.895372] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f13a4e5-f026-4c8a-96e5-b95cc51775e0 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.908469] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with 
inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.430684] env[61648]: ERROR nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [req-eea644b3-445b-4535-a2ee-04177955e8e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eea644b3-445b-4535-a2ee-04177955e8e7"}]} [ 997.452459] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 997.471681] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 997.471892] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.488794] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Refreshing aggregate associations for 
resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 997.507215] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 997.535102] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af19b0a-a13c-444b-95b6-543838ccdd88 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.543865] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c7922f-01b7-4612-a76d-557c0bf66e87 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.573853] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa79503-5eca-467b-ac9b-f1ba6a7c2d91 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.581723] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b7dd97-9c6b-4c14-9385-efa173ee61e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.595109] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.124359] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 998.124699] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating resource provider 
1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 120 to 121 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 998.124792] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.629787] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.817s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.630334] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 998.633306] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.768s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.633497] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.633646] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 998.634699] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592ea544-635f-4ac0-a8b4-09cf8d55c4ee {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.643122] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c648889b-c425-4f12-ab42-8e0ef1afc4c9 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.659684] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0d335f-268c-4f50-890c-362ac77974f6 {{(pid=61648) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.665709] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d878b49-2116-4d02-b598-2d00e75fb1d7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.703432] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181374MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 998.703633] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.703842] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.136109] env[61648]: DEBUG nova.compute.utils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.137463] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Allocating IP information in the background. 
{{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 999.137633] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.201396] env[61648]: DEBUG nova.policy [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72fc515fab444f82b247bcc12532554b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fe44273186441de93682c7941f6607a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 999.441998] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Successfully created port: b64a283c-56bc-4c20-8271-bc6c09593ded {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.641336] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Start building block device mappings for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 999.729061] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Instance 5fe38841-dd68-43d0-ab6a-e99137a28ec6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61648) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 999.729300] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 999.729491] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 999.764599] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa463d4-6fed-4373-90f6-2e7ab56e7113 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.781480] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9257918e-9601-47cf-adf6-376b960a6fbc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.813308] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b175c86f-922f-4a1e-b6b0-40a374d6f867 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.825204] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0d9f91-2fcc-4ae4-9401-428bc29a4758 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.840445] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.157494] env[61648]: DEBUG nova.compute.manager [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Received event network-changed-b64a283c-56bc-4c20-8271-bc6c09593ded {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1000.157789] env[61648]: DEBUG nova.compute.manager [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Refreshing instance network info cache due to event network-changed-b64a283c-56bc-4c20-8271-bc6c09593ded. 
{{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1000.158594] env[61648]: DEBUG oslo_concurrency.lockutils [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] Acquiring lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.158797] env[61648]: DEBUG oslo_concurrency.lockutils [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] Acquired lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.158996] env[61648]: DEBUG nova.network.neutron [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Refreshing network info cache for port b64a283c-56bc-4c20-8271-bc6c09593ded {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.312100] env[61648]: ERROR nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. [ 1000.312100] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1000.312100] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1000.312100] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1000.312100] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1000.312100] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1000.312100] env[61648]: ERROR nova.compute.manager raise self.value [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1000.312100] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 1000.312100] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1000.312100] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1000.312691] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1000.312691] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1000.312691] env[61648]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. [ 1000.312691] env[61648]: ERROR nova.compute.manager [ 1000.312691] env[61648]: Traceback (most recent call last): [ 1000.312691] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1000.312691] env[61648]: listener.cb(fileno) [ 1000.312691] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1000.312691] env[61648]: result = function(*args, **kwargs) [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1000.312691] env[61648]: return func(*args, **kwargs) [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1000.312691] env[61648]: raise e [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1000.312691] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1000.312691] env[61648]: created_port_ids = self._update_ports_for_instance( [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1000.312691] env[61648]: with excutils.save_and_reraise_exception(): [ 1000.312691] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1000.312691] env[61648]: self.force_reraise() [ 1000.312691] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1000.312691] env[61648]: raise self.value [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1000.312691] env[61648]: updated_port = self._update_port( [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1000.312691] env[61648]: _ensure_no_port_binding_failure(port) [ 1000.312691] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1000.312691] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 1000.313691] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. 
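The traceback above pins down the failure path: `_update_port()` calls `_ensure_no_port_binding_failure()`, which raises `PortBindingFailed` when Neutron reports a failed binding, and `_update_ports_for_instance()` re-raises it through `oslo_utils.excutils.save_and_reraise_exception()` (the `force_reraise()` frames). The sketch below is a minimal, self-contained illustration of that pattern, not Nova's actual code; the exception class and port dicts are stand-ins, and it only assumes `oslo.utils` is installed.

```python
# Minimal sketch of the pattern visible in the traceback above; the exception
# class and port dicts are toy stand-ins, not Nova's real objects.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron flags a failed binding via the port's binding:vif_type field.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


def update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        try:
            ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
        except Exception:
            # save_and_reraise_exception() lets cleanup run and then re-raises
            # the original exception from __exit__, which is where the
            # force_reraise() frame in the traceback comes from.
            with excutils.save_and_reraise_exception():
                print(f"rolling back port {port.get('id')}")
    return created_port_ids


if __name__ == "__main__":
    ports = [{'id': 'b64a283c-56bc-4c20-8271-bc6c09593ded',
              'binding:vif_type': 'binding_failed'}]
    try:
        update_ports_for_instance(ports)
    except PortBindingFailed as exc:
        print(exc)
```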
[ 1000.313691] env[61648]: Removing descriptor: 14 [ 1000.342233] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1000.662158] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1000.686277] env[61648]: DEBUG nova.network.neutron [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.694055] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.694340] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.694782] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.694782] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.694904] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 
tempest-ServersNegativeTestJSON-95320281-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.695816] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.696364] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.696364] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.696586] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.696784] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.696974] env[61648]: DEBUG nova.virt.hardware [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.697905] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906f7e99-61f2-4c9c-b7e2-8b06391a91da {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.709305] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17065033-d726-44ec-b620-2e8a5b788a1e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.723125] env[61648]: ERROR nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. 
[ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Traceback (most recent call last): [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] yield resources [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.driver.spawn(context, instance, image_meta, [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] vm_ref = self.build_virtual_machine(instance, [ 1000.723125] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] vif_infos = vmwarevif.get_vif_info(self._session, [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] for vif in network_info: [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return self._sync_wrapper(fn, *args, **kwargs) [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.wait() [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self[:] = self._gt.wait() [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return self._exit_event.wait() [ 1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1000.723610] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] current.throw(*self._exc) [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] result = function(*args, **kwargs) [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return func(*args, **kwargs) [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise e [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] nwinfo = self.network_api.allocate_for_instance( [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] created_port_ids = self._update_ports_for_instance( [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] with excutils.save_and_reraise_exception(): [ 1000.724079] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.force_reraise() [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise self.value [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] updated_port = self._update_port( [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] _ensure_no_port_binding_failure(port) [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise exception.PortBindingFailed(port_id=port['id']) [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. [ 1000.724530] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] [ 1000.724530] env[61648]: INFO nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Terminating instance [ 1000.725398] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquiring lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.759912] env[61648]: DEBUG nova.network.neutron [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.847239] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1000.847456] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.144s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.262722] env[61648]: DEBUG oslo_concurrency.lockutils [req-3658a8f5-7fab-4c97-bd4f-17ef6bf704a9 req-ace25d02-e2b7-440b-b584-cc918a38a285 service nova] Releasing lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.265664] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquired lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.265664] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.344341] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.787876] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1001.869053] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.186957] env[61648]: DEBUG nova.compute.manager [req-d5980fd6-361a-4760-bb53-1303cc0854cb req-498a4e72-1309-4dce-93af-625e16c20770 service nova] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Received event network-vif-deleted-b64a283c-56bc-4c20-8271-bc6c09593ded {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.373023] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Releasing lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.373023] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1002.373023] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.373023] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b805ca4-8ff4-4b4b-8196-c7694d4005ba {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.383154] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bded85d4-5217-4a7e-ab3e-3ff785a4fa2a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.408632] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5fe38841-dd68-43d0-ab6a-e99137a28ec6 could not be found. 
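The warning above shows the destroy path tolerating a VM that never reached the backend: the vCenter lookup (SearchIndex.FindAllByUuid) finds nothing, an InstanceNotFound is caught, and the manager continues with network and allocation cleanup as if the VM had been destroyed. A rough sketch of that guard follows; the helper names (`find_vm_by_uuid`, `delete_vm`) are hypothetical stubs for illustration, not the vmwareapi driver's API.

```python
# Hypothetical sketch of the "instance missing on backend" guard seen above.
# find_vm_by_uuid() and delete_vm() are illustrative stubs, not vmwareapi calls.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def find_vm_by_uuid(uuid):
    # Stub: pretend the backend has no VM registered under this UUID.
    raise InstanceNotFound(f"Instance {uuid} could not be found.")


def delete_vm(vm_ref):
    pass  # a real driver would power off and unregister the VM here


def destroy(uuid):
    try:
        delete_vm(find_vm_by_uuid(uuid))
    except InstanceNotFound:
        # Nothing to tear down on the hypervisor; warn and let the compute
        # manager carry on with network and allocation cleanup.
        LOG.warning("Instance does not exist on backend: %s", uuid)
    LOG.debug("Instance destroyed")


destroy("5fe38841-dd68-43d0-ab6a-e99137a28ec6")
```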
[ 1002.409101] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.409432] env[61648]: INFO nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1002.409779] env[61648]: DEBUG oslo.service.loopingcall [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.410144] env[61648]: DEBUG nova.compute.manager [-] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1002.410356] env[61648]: DEBUG nova.network.neutron [-] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.426363] env[61648]: DEBUG nova.network.neutron [-] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.928932] env[61648]: DEBUG nova.network.neutron [-] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.431600] env[61648]: INFO nova.compute.manager [-] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Took 1.02 seconds to deallocate network for instance. 
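The recurring `Acquiring lock "compute_resources" by ...`, `Lock ... acquired ... waited`, and `Lock ... "released" ... held` lines throughout this log come from oslo.concurrency's named-lock wrapper, which records how long a caller waited for and then held a semaphore. A minimal sketch of the same mechanism, assuming `oslo.concurrency` is installed; the lock name and the work done under it are illustrative only.

```python
# Sketch of the named-lock pattern behind the "Acquiring lock ... / acquired
# ... waited / released ... held" DEBUG lines. Assumes oslo.concurrency is
# installed; the lock name and sleeps are illustrative.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Runs with the 'compute_resources' semaphore held, serializing
    # resource-tracker updates within this process.
    time.sleep(0.1)


def abort_instance_claim():
    # The context-manager form, which produces the "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" variants seen for the
    # refresh_cache-* locks in this log.
    with lockutils.lock('compute_resources'):
        time.sleep(0.1)


update_available_resource()
abort_instance_claim()
```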
[ 1003.433954] env[61648]: DEBUG nova.compute.claims [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1003.434154] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.434376] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.971394] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8802a0-3117-4597-ac1c-0578bb2c42e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.978960] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201b2b8c-defe-41e6-ab87-79102ac9d5bf {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.010455] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df80c767-3faf-4d68-bceb-9fe4d9757893 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.018079] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4105b2e7-fe20-4547-811d-bff4d1d259ea {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.031100] env[61648]: DEBUG nova.compute.provider_tree [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.534703] env[61648]: DEBUG nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1005.039584] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 
tempest-ServersNegativeTestJSON-95320281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.605s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.040238] env[61648]: ERROR nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Traceback (most recent call last): [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.driver.spawn(context, instance, image_meta, [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] vm_ref = self.build_virtual_machine(instance, [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] vif_infos = vmwarevif.get_vif_info(self._session, [ 1005.040238] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] for vif in network_info: [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return self._sync_wrapper(fn, *args, **kwargs) [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.wait() [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self[:] = self._gt.wait() [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return self._exit_event.wait() [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] current.throw(*self._exc) [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1005.040637] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] result = function(*args, **kwargs) [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] return func(*args, **kwargs) [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise e [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] nwinfo = self.network_api.allocate_for_instance( [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] created_port_ids = self._update_ports_for_instance( [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] with excutils.save_and_reraise_exception(): [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] self.force_reraise() [ 1005.041093] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise self.value [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] updated_port = self._update_port( [ 1005.041523] env[61648]: ERROR 
nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] _ensure_no_port_binding_failure(port) [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] raise exception.PortBindingFailed(port_id=port['id']) [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] nova.exception.PortBindingFailed: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. [ 1005.041523] env[61648]: ERROR nova.compute.manager [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] [ 1005.041523] env[61648]: DEBUG nova.compute.utils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1005.042816] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Build of instance 5fe38841-dd68-43d0-ab6a-e99137a28ec6 was re-scheduled: Binding failed for port b64a283c-56bc-4c20-8271-bc6c09593ded, please check neutron logs for more information. 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1005.043241] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1005.043469] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquiring lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.043613] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Acquired lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.043769] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.560501] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1005.631171] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.134330] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Releasing lock "refresh_cache-5fe38841-dd68-43d0-ab6a-e99137a28ec6" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.134571] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1006.134754] env[61648]: DEBUG nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1006.134922] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.150595] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.653725] env[61648]: DEBUG nova.network.neutron [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.156468] env[61648]: INFO nova.compute.manager [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] [instance: 5fe38841-dd68-43d0-ab6a-e99137a28ec6] Took 1.02 seconds to deallocate network for instance. 
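The resource-tracker figures reported earlier are straightforward to reproduce: used_ram starts at the 512 MB reserved for the host and grows by each instance's flavor memory (192 MB for m1.nano, giving the 704 MB in the final resource view), and the schedulable capacity placement works from is (total - reserved) x allocation_ratio per resource class. A small check of that arithmetic, with the numbers copied from the records above:

```python
# Re-deriving the resource-tracker / placement figures quoted in this log.
# All numbers are copied from the records above; the capacity formula is the
# usual (total - reserved) * allocation_ratio rule.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity}")

# used_ram in the "Final resource view" = reserved host memory + flavor memory
reserved_host_memory_mb = 512
m1_nano_memory_mb = 192
print("used_ram =", reserved_host_memory_mb + m1_nano_memory_mb, "MB")  # 704 MB
```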
[ 1008.206909] env[61648]: INFO nova.scheduler.client.report [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Deleted allocations for instance 5fe38841-dd68-43d0-ab6a-e99137a28ec6 [ 1008.714664] env[61648]: DEBUG oslo_concurrency.lockutils [None req-75deb8f8-deb1-4c18-b8e1-b4b616ff4f99 tempest-ServersNegativeTestJSON-95320281 tempest-ServersNegativeTestJSON-95320281-project-member] Lock "5fe38841-dd68-43d0-ab6a-e99137a28ec6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.928s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.454960] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquiring lock "213c65ab-fa70-4882-8819-bfbbc302c9a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.455597] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "213c65ab-fa70-4882-8819-bfbbc302c9a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.961266] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1012.487070] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.487070] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.487070] env[61648]: INFO nova.compute.claims [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.526016] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861dec23-8faa-4315-931e-d7bde6b4a2c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.534130] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6257362e-6a6a-487e-a37b-d6ba54f4c079 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.563719] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aee3721-1ed6-4927-b2e8-e5d3ccc4cd0d {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.572024] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00699f9e-ba91-4ad1-adfa-547f82fc7871 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.584389] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.106178] env[61648]: ERROR nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [req-83931406-8395-448e-9b4d-0ab66ffe6e55] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-83931406-8395-448e-9b4d-0ab66ffe6e55"}]} [ 1014.123038] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Refreshing inventories for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1014.134124] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating ProviderTree inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1014.134534] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.145664] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Refreshing aggregate associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, aggregates: None {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1014.163148] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Refreshing trait associations for resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64 {{(pid=61648) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1014.188427] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5fda0a65-5890-4648-b23e-ff1fc1ca11e8 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.195320] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786cce3d-0f72-487b-8abf-018eb5c96984 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.225296] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c07ebfd-41b6-4ad9-9038-16e0c67b869a {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.233024] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c592d0-1b5c-41c2-a49e-01233e7e230e {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.246651] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.777249] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updated inventory for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1014.778255] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating resource provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 generation from 122 to 123 during operation: update_inventory {{(pid=61648) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1014.779871] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Updating inventory in ProviderTree for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.283691] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.284204] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1015.790899] env[61648]: DEBUG nova.compute.utils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.795768] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1015.795768] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1015.842388] env[61648]: DEBUG nova.policy [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d77904341224996961b17aa24fce456', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bbba7b3c981c48dab21dba9e9332e990', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 1016.139339] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Successfully created port: 6476f0db-09bb-41a8-9397-68b2bfee1caa {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.296897] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1016.804328] env[61648]: INFO nova.virt.block_device [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Booting with volume 320ea4cf-274f-495c-a0e4-4722fed4b221 at /dev/sda [ 1016.829165] env[61648]: DEBUG nova.compute.manager [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Received event network-changed-6476f0db-09bb-41a8-9397-68b2bfee1caa {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.829165] env[61648]: DEBUG nova.compute.manager [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Refreshing instance network info cache due to event network-changed-6476f0db-09bb-41a8-9397-68b2bfee1caa. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1016.829165] env[61648]: DEBUG oslo_concurrency.lockutils [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] Acquiring lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.829165] env[61648]: DEBUG oslo_concurrency.lockutils [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] Acquired lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.829576] env[61648]: DEBUG nova.network.neutron [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Refreshing network info cache for port 6476f0db-09bb-41a8-9397-68b2bfee1caa {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.849092] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8f4c53c-b224-45a7-b2ae-d9ca4349affb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.860973] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889905cc-3c34-4a65-bd21-3dd675246beb {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.885708] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef408e4b-6815-40e2-a362-27324d90b507 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.891965] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26744696-79fc-40d5-a7cc-f7cfe9564045 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.914522] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6961d3e9-74d0-492a-8957-fbaeb4f39824 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.921085] env[61648]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab007502-9e90-474b-b314-24df922c6072 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.939450] env[61648]: DEBUG nova.virt.block_device [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating existing volume attachment record: 22b1f511-b833-4577-834f-ab349762021e {{(pid=61648) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1017.017749] env[61648]: ERROR nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. [ 1017.017749] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1017.017749] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1017.017749] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1017.017749] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1017.017749] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1017.017749] env[61648]: ERROR nova.compute.manager raise self.value [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1017.017749] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 1017.017749] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1017.017749] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1017.018353] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1017.018353] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1017.018353] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. 
[ 1017.018353] env[61648]: ERROR nova.compute.manager [ 1017.018353] env[61648]: Traceback (most recent call last): [ 1017.018353] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1017.018353] env[61648]: listener.cb(fileno) [ 1017.018353] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1017.018353] env[61648]: result = function(*args, **kwargs) [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1017.018353] env[61648]: return func(*args, **kwargs) [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1017.018353] env[61648]: raise e [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1017.018353] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1017.018353] env[61648]: created_port_ids = self._update_ports_for_instance( [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1017.018353] env[61648]: with excutils.save_and_reraise_exception(): [ 1017.018353] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1017.018353] env[61648]: self.force_reraise() [ 1017.018353] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1017.018353] env[61648]: raise self.value [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1017.018353] env[61648]: updated_port = self._update_port( [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1017.018353] env[61648]: _ensure_no_port_binding_failure(port) [ 1017.018353] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1017.018353] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 1017.019237] env[61648]: nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. [ 1017.019237] env[61648]: Removing descriptor: 14 [ 1017.354551] env[61648]: DEBUG nova.network.neutron [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.450264] env[61648]: DEBUG nova.network.neutron [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.951657] env[61648]: DEBUG oslo_concurrency.lockutils [req-ea7bc575-94f7-4b6d-924b-790ce684b793 req-1af70e0d-7544-4980-bb86-619546b99763 service nova] Releasing lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.857028] env[61648]: DEBUG nova.compute.manager [req-0cfcfe5d-39f0-41c3-afd5-c72067fbedd3 req-66d89433-31c3-4cd2-9e5c-ccd95963f05a service nova] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Received event network-vif-deleted-6476f0db-09bb-41a8-9397-68b2bfee1caa {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1019.061974] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Start spawning the instance on the hypervisor. {{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1019.062803] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1019.063148] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1019.063395] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.063682] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1019.063910] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Image pref 
0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.064159] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1019.064471] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1019.064715] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1019.064978] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1019.065246] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1019.065512] env[61648]: DEBUG nova.virt.hardware [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1019.066830] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f679b9d3-5cc6-4c9e-afff-3f705cb9f254 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.079210] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1895f-cf54-4020-b8fd-d270ede0e481 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.094879] env[61648]: ERROR nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. 
[ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Traceback (most recent call last): [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] yield resources [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.driver.spawn(context, instance, image_meta, [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] vm_ref = self.build_virtual_machine(instance, [ 1019.094879] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] for vif in network_info: [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return self._sync_wrapper(fn, *args, **kwargs) [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.wait() [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self[:] = self._gt.wait() [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return self._exit_event.wait() [ 1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1019.095568] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] current.throw(*self._exc) [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] result = function(*args, **kwargs) [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return func(*args, **kwargs) [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise e [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] nwinfo = self.network_api.allocate_for_instance( [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] created_port_ids = self._update_ports_for_instance( [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] with excutils.save_and_reraise_exception(): [ 1019.096191] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.force_reraise() [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise self.value [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] updated_port = self._update_port( [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] _ensure_no_port_binding_failure(port) [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise exception.PortBindingFailed(port_id=port['id']) [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. [ 1019.096841] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] [ 1019.096841] env[61648]: INFO nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Terminating instance [ 1019.098594] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquiring lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.098852] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquired lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.099151] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.617199] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.685459] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.188078] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Releasing lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.188635] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Start destroying the instance on the hypervisor. 
{{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1020.188974] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5984f7c1-53ed-4c70-8d5f-6f802e738735 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.198362] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71de48fd-a403-4066-a82a-3b76c40fad76 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.221373] env[61648]: WARNING nova.virt.vmwareapi.driver [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 213c65ab-fa70-4882-8819-bfbbc302c9a4 could not be found. [ 1020.221609] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.221868] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab847b48-87d7-49c5-9050-38834d8e18b7 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.229744] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b9954-e970-4e57-8794-84c411ae19e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.251145] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 213c65ab-fa70-4882-8819-bfbbc302c9a4 could not be found. [ 1020.251368] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.251551] env[61648]: INFO nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1020.251784] env[61648]: DEBUG oslo.service.loopingcall [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.251996] env[61648]: DEBUG nova.compute.manager [-] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1020.252106] env[61648]: DEBUG nova.network.neutron [-] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1020.267286] env[61648]: DEBUG nova.network.neutron [-] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.769472] env[61648]: DEBUG nova.network.neutron [-] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.271932] env[61648]: INFO nova.compute.manager [-] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Took 1.02 seconds to deallocate network for instance. [ 1021.823670] env[61648]: INFO nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Took 0.55 seconds to detach 1 volumes for instance. [ 1021.825795] env[61648]: DEBUG nova.compute.claims [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1021.826080] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.826323] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.364315] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cbce6f-b922-4a8d-a76e-59683ee4aaac {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.373566] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f930705-c910-482a-a868-04871416fbac {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.405108] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66973e4a-c70a-41b8-b492-b62d93812aa3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.414138] env[61648]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f375975-d614-4a25-8b53-3ee900b8dc93 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.428107] env[61648]: DEBUG nova.compute.provider_tree [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.931895] env[61648]: DEBUG nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.437260] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.611s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.437880] env[61648]: ERROR nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. 
[ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Traceback (most recent call last): [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.driver.spawn(context, instance, image_meta, [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] vm_ref = self.build_virtual_machine(instance, [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 1023.437880] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] for vif in network_info: [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return self._sync_wrapper(fn, *args, **kwargs) [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.wait() [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self[:] = self._gt.wait() [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return self._exit_event.wait() [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] current.throw(*self._exc) [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1023.438408] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] result = function(*args, **kwargs) [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] return func(*args, **kwargs) [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise e [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] nwinfo = self.network_api.allocate_for_instance( [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] created_port_ids = self._update_ports_for_instance( [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] with excutils.save_and_reraise_exception(): [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] self.force_reraise() [ 1023.438777] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise self.value [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] updated_port = self._update_port( [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] _ensure_no_port_binding_failure(port) [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] raise exception.PortBindingFailed(port_id=port['id']) [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] 
nova.exception.PortBindingFailed: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. [ 1023.439206] env[61648]: ERROR nova.compute.manager [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] [ 1023.439206] env[61648]: DEBUG nova.compute.utils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1023.440720] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Build of instance 213c65ab-fa70-4882-8819-bfbbc302c9a4 was re-scheduled: Binding failed for port 6476f0db-09bb-41a8-9397-68b2bfee1caa, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1023.441150] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1023.441378] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquiring lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.441523] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Acquired lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.441680] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.964464] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1024.031592] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.534831] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Releasing lock "refresh_cache-213c65ab-fa70-4882-8819-bfbbc302c9a4" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.535205] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1024.535371] env[61648]: DEBUG nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1024.535513] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1024.550727] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.053582] env[61648]: DEBUG nova.network.neutron [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.556443] env[61648]: INFO nova.compute.manager [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] [instance: 213c65ab-fa70-4882-8819-bfbbc302c9a4] Took 1.02 seconds to deallocate network for instance. 
[ 1026.585193] env[61648]: INFO nova.scheduler.client.report [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Deleted allocations for instance 213c65ab-fa70-4882-8819-bfbbc302c9a4 [ 1027.092790] env[61648]: DEBUG oslo_concurrency.lockutils [None req-0d53e87f-6628-43d7-a414-e12bef1901c9 tempest-ServerActionsV293TestJSON-1056616872 tempest-ServerActionsV293TestJSON-1056616872-project-member] Lock "213c65ab-fa70-4882-8819-bfbbc302c9a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.637s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.179041] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquiring lock "fc6c894e-64a4-4bee-be34-a511500a92ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.179419] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "fc6c894e-64a4-4bee-be34-a511500a92ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.682397] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Starting instance... 
{{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1033.205677] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.205944] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.207752] env[61648]: INFO nova.compute.claims [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.243923] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e47ee1-f7b4-437d-9a05-7ab98e32694c {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.251579] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaa6e6e-82d1-4c54-9d61-b3d9dd0ef344 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.279673] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4a4c16-00e2-4eff-9ff6-b9c06667c7e3 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.286153] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005dc88d-a34c-4b78-9ea8-26eb7557e80f {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.298464] env[61648]: DEBUG nova.compute.provider_tree [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.801165] env[61648]: DEBUG nova.scheduler.client.report [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1035.307248] env[61648]: DEBUG oslo_concurrency.lockutils [None 
req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.307782] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Start building networks asynchronously for instance. {{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1035.812590] env[61648]: DEBUG nova.compute.utils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Using /dev/sd instead of None {{(pid=61648) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.817023] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Allocating IP information in the background. {{(pid=61648) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1035.817023] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] allocate_for_instance() {{(pid=61648) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.860741] env[61648]: DEBUG nova.policy [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f37b258cac743168219613583d7d12c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fd9e66340164a31a1f1262b6ae1a446', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61648) authorize /opt/stack/nova/nova/policy.py:201}} [ 1036.111506] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Successfully created port: b6c053d0-a237-4d1d-9b0e-e7441a7cde89 {{(pid=61648) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1036.317549] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Start building block device mappings for instance. 
{{(pid=61648) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1036.712460] env[61648]: DEBUG nova.compute.manager [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Received event network-changed-b6c053d0-a237-4d1d-9b0e-e7441a7cde89 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.712573] env[61648]: DEBUG nova.compute.manager [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Refreshing instance network info cache due to event network-changed-b6c053d0-a237-4d1d-9b0e-e7441a7cde89. {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1036.712773] env[61648]: DEBUG oslo_concurrency.lockutils [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] Acquiring lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.712912] env[61648]: DEBUG oslo_concurrency.lockutils [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] Acquired lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.713156] env[61648]: DEBUG nova.network.neutron [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Refreshing network info cache for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89 {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.901767] env[61648]: ERROR nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. 
[ 1036.901767] env[61648]: ERROR nova.compute.manager Traceback (most recent call last): [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1036.901767] env[61648]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1036.901767] env[61648]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1036.901767] env[61648]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1036.901767] env[61648]: ERROR nova.compute.manager self.force_reraise() [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1036.901767] env[61648]: ERROR nova.compute.manager raise self.value [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1036.901767] env[61648]: ERROR nova.compute.manager updated_port = self._update_port( [ 1036.901767] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1036.901767] env[61648]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1036.902307] env[61648]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1036.902307] env[61648]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1036.902307] env[61648]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. 
[ 1036.902307] env[61648]: ERROR nova.compute.manager [ 1036.902307] env[61648]: Traceback (most recent call last): [ 1036.902307] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1036.902307] env[61648]: listener.cb(fileno) [ 1036.902307] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1036.902307] env[61648]: result = function(*args, **kwargs) [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1036.902307] env[61648]: return func(*args, **kwargs) [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1036.902307] env[61648]: raise e [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1036.902307] env[61648]: nwinfo = self.network_api.allocate_for_instance( [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1036.902307] env[61648]: created_port_ids = self._update_ports_for_instance( [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1036.902307] env[61648]: with excutils.save_and_reraise_exception(): [ 1036.902307] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1036.902307] env[61648]: self.force_reraise() [ 1036.902307] env[61648]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1036.902307] env[61648]: raise self.value [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1036.902307] env[61648]: updated_port = self._update_port( [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1036.902307] env[61648]: _ensure_no_port_binding_failure(port) [ 1036.902307] env[61648]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1036.902307] env[61648]: raise exception.PortBindingFailed(port_id=port['id']) [ 1036.903140] env[61648]: nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. [ 1036.903140] env[61648]: Removing descriptor: 14 [ 1037.230402] env[61648]: DEBUG nova.network.neutron [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.299911] env[61648]: DEBUG nova.network.neutron [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.328360] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Start spawning the instance on the hypervisor. 
{{(pid=61648) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1037.352367] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-22T15:34:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-22T15:33:54Z,direct_url=,disk_format='vmdk',id=a3243eb3-32d0-4887-afc7-2030d2340206,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ba2520ba5ee34719a6f02d5937d25015',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-22T15:33:55Z,virtual_size=,visibility=), allow threads: False {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1037.352616] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Flavor limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1037.352768] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Image limits 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.352944] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Flavor pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1037.353106] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Image pref 0:0:0 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.353253] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61648) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1037.353456] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1037.353612] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1037.353775] env[61648]: DEBUG nova.virt.hardware [None 
req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Got 1 possible topologies {{(pid=61648) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1037.353935] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1037.354122] env[61648]: DEBUG nova.virt.hardware [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61648) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1037.354990] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82929bc1-0819-49d6-a71f-a472c48b8f32 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.363114] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedbf6cc-f54f-472c-a444-6fcccb2efd66 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.376837] env[61648]: ERROR nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. 
[ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Traceback (most recent call last): [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] yield resources [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.driver.spawn(context, instance, image_meta, [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] vm_ref = self.build_virtual_machine(instance, [ 1037.376837] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] for vif in network_info: [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return self._sync_wrapper(fn, *args, **kwargs) [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.wait() [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self[:] = self._gt.wait() [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return self._exit_event.wait() [ 1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1037.377304] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] current.throw(*self._exc) [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] result = function(*args, **kwargs) [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return func(*args, **kwargs) [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise e [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] nwinfo = self.network_api.allocate_for_instance( [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] created_port_ids = self._update_ports_for_instance( [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] with excutils.save_and_reraise_exception(): [ 1037.377755] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.force_reraise() [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise self.value [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] updated_port = self._update_port( [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] _ensure_no_port_binding_failure(port) [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise exception.PortBindingFailed(port_id=port['id']) [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. [ 1037.378245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] [ 1037.378245] env[61648]: INFO nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Terminating instance [ 1037.379416] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquiring lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.802203] env[61648]: DEBUG oslo_concurrency.lockutils [req-209963be-41de-441b-b24a-6dc077fb3270 req-70671c4a-8a9c-450a-8bc0-444d3dd7fb48 service nova] Releasing lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.802577] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquired lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.802763] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.321748] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.388472] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.739117] env[61648]: DEBUG nova.compute.manager [req-be097d57-b28c-4176-aa9f-5fe311026719 req-16853f7d-da69-4af3-a484-1296154108ee service nova] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Received event network-vif-deleted-b6c053d0-a237-4d1d-9b0e-e7441a7cde89 {{(pid=61648) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.891347] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Releasing lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.891776] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Start destroying the instance on the hypervisor. {{(pid=61648) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1038.891973] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Destroying instance {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1038.892290] env[61648]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c48d3d81-445a-4cf6-a990-c87617181287 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.901598] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a8ae5a-5f6b-496b-b5ff-c35dba2db358 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.922634] env[61648]: WARNING nova.virt.vmwareapi.vmops [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fc6c894e-64a4-4bee-be34-a511500a92ba could not be found. [ 1038.922856] env[61648]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance destroyed {{(pid=61648) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.923051] env[61648]: INFO nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 1038.923445] env[61648]: DEBUG oslo.service.loopingcall [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61648) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.923700] env[61648]: DEBUG nova.compute.manager [-] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1038.923797] env[61648]: DEBUG nova.network.neutron [-] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.939078] env[61648]: DEBUG nova.network.neutron [-] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.441698] env[61648]: DEBUG nova.network.neutron [-] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.944070] env[61648]: INFO nova.compute.manager [-] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Took 1.02 seconds to deallocate network for instance. [ 1039.947164] env[61648]: DEBUG nova.compute.claims [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Aborting claim: {{(pid=61648) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1039.947462] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.947820] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.487922] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba4b49-ad67-484e-99b5-ac83744d5a1b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.495500] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a0f824-43f7-4fba-a45f-27b72449325b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.524240] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66935b5-6f71-47e7-9c24-1d5769b359e5 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.530962] env[61648]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8627c9a-1f66-4bc8-95e5-aab0f9377475 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.546384] env[61648]: DEBUG nova.compute.provider_tree [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.049456] env[61648]: DEBUG nova.scheduler.client.report [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.554528] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.607s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.555245] env[61648]: ERROR nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. 
[ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Traceback (most recent call last): [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.driver.spawn(context, instance, image_meta, [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] vm_ref = self.build_virtual_machine(instance, [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 1041.555245] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] for vif in network_info: [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return self._sync_wrapper(fn, *args, **kwargs) [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.wait() [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self[:] = self._gt.wait() [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return self._exit_event.wait() [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] current.throw(*self._exc) [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1041.555653] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] result = function(*args, **kwargs) [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] return func(*args, **kwargs) [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise e [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] nwinfo = self.network_api.allocate_for_instance( [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] created_port_ids = self._update_ports_for_instance( [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] with excutils.save_and_reraise_exception(): [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] self.force_reraise() [ 1041.556121] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise self.value [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] updated_port = self._update_port( [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] _ensure_no_port_binding_failure(port) [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] raise exception.PortBindingFailed(port_id=port['id']) [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] 
nova.exception.PortBindingFailed: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. [ 1041.556560] env[61648]: ERROR nova.compute.manager [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] [ 1041.556560] env[61648]: DEBUG nova.compute.utils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. {{(pid=61648) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1041.557884] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Build of instance fc6c894e-64a4-4bee-be34-a511500a92ba was re-scheduled: Binding failed for port b6c053d0-a237-4d1d-9b0e-e7441a7cde89, please check neutron logs for more information. {{(pid=61648) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1041.558302] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Unplugging VIFs for instance {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1041.558528] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquiring lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.558675] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Acquired lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.558835] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Building network info cache for instance {{(pid=61648) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.078647] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance cache missing network info. 
{{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.147301] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.649992] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Releasing lock "refresh_cache-fc6c894e-64a4-4bee-be34-a511500a92ba" {{(pid=61648) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.650373] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61648) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1042.650373] env[61648]: DEBUG nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Deallocating network for instance {{(pid=61648) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1042.650536] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] deallocate_for_instance() {{(pid=61648) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1042.665994] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Instance cache missing network info. {{(pid=61648) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.168677] env[61648]: DEBUG nova.network.neutron [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Updating instance_info_cache with network_info: [] {{(pid=61648) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.672242] env[61648]: INFO nova.compute.manager [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] [instance: fc6c894e-64a4-4bee-be34-a511500a92ba] Took 1.02 seconds to deallocate network for instance. 
[ 1044.702505] env[61648]: INFO nova.scheduler.client.report [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Deleted allocations for instance fc6c894e-64a4-4bee-be34-a511500a92ba [ 1045.212929] env[61648]: DEBUG oslo_concurrency.lockutils [None req-e77b5f0d-f0fa-4c07-9777-68ca6e03ba99 tempest-ServerRescueTestJSON-347233784 tempest-ServerRescueTestJSON-347233784-project-member] Lock "fc6c894e-64a4-4bee-be34-a511500a92ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.033s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.855752] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.860237] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.860237] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Cleaning up deleted instances with incomplete migration {{(pid=61648) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1052.362795] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.362795] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.362795] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61648) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1053.864270] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.864270] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Cleaning up deleted instances {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1054.368023] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] There are 4 instances to clean {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1054.368023] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: e4adb624-e900-4838-a5c5-2cd0d488f458] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1054.872082] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: acc5b6cb-16ee-4756-9088-fa094eb83daa] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1055.375477] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: 32a2c7ce-2980-4eac-ad52-b8d5d67d669b] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1055.879560] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] [instance: b9130bac-f92b-4208-b84c-852f4a269153] Instance has had 0 of 5 cleanup attempts {{(pid=61648) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1057.383704] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.383704] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Starting heal instance info cache {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1057.383704] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Rebuilding the list of instances to heal {{(pid=61648) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1057.888321] env[61648]: DEBUG nova.compute.manager [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Didn't find any instances for network info cache update. 
[ 1057.888321] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1057.888321] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1057.888321] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1057.888321] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1057.888321] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1058.392152] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1058.392152] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1058.392152] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1058.392152] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61648) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1058.393090] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6428238e-e39a-40e7-b93b-b35c23f65d5b {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1058.401676] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fb2d3b-c8c8-480b-a9fa-8e7cc2f50c16 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1058.415470] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea341b-ef33-4702-843c-14dd76c6f2f4 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1058.421742] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0a41c8-8beb-4591-ad72-cf48bcb043c6 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1058.450619] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181417MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=61648) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1058.450774] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1058.450951] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1059.599548] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1059.599801] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61648) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1059.613073] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d0124b-e0d4-4b8d-9c97-6c06e9965d20 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.620382] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e911c2a9-63fb-46f6-bddf-01f62adc7c78 {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.650146] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec93744-b0dd-4f02-bb2d-509276a9a0cc {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.656760] env[61648]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90396990-9c2d-4036-94be-ac01f1a4a7cd {{(pid=61648) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1059.669412] env[61648]: DEBUG nova.compute.provider_tree [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed in ProviderTree for provider: 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 {{(pid=61648) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
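The "compute_resources" acquire/waited/held messages above (and the release further below) are emitted by oslo.concurrency's lockutils wrapper around the resource tracker's critical sections. A minimal illustration of that locking pattern, with placeholder function bodies rather than Nova's resource tracker code:

```python
# Hedged illustration of the oslo.concurrency pattern behind the
# "compute_resources" lock messages; the functions below are placeholders.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource_demo():
    # While this runs, any other caller using the same lock name blocks;
    # the decorator's wrapper logs the "Acquiring lock", ":: waited" and
    # ":: held" DEBUG lines seen above when debug logging is enabled.
    time.sleep(0.1)  # stand-in for the resource audit work


def clean_cache_demo():
    # The same named lock can also be taken explicitly as a context manager
    # for a short critical section.
    with lockutils.lock('compute_resources'):
        pass


if __name__ == '__main__':
    update_available_resource_demo()
    clean_cache_demo()
```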
[ 1060.172311] env[61648]: DEBUG nova.scheduler.client.report [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Inventory has not changed for provider 1f7892d3-7c3b-4065-88c0-a2c5eb65d4d0 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61648) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1060.678118] env[61648]: DEBUG nova.compute.resource_tracker [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61648) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1060.678485] env[61648]: DEBUG oslo_concurrency.lockutils [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.227s {{(pid=61648) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1061.859523] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1065.357809] env[61648]: DEBUG oslo_service.periodic_task [None req-13f06054-2c8d-4808-9726-1ae6d04a341e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61648) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
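For reference, the inventory payload in the report-client entry above maps to schedulable capacity the way Placement defines it: per resource class, capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may request. A small sketch applying that arithmetic to the logged values (the values are copied from the log; the script itself is illustrative):

```python
# Capacity math for the inventory reported above, using the Placement
# convention: capacity = (total - reserved) * allocation_ratio, with
# max_unit bounding any single allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 156,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, largest single allocation={inv['max_unit']}")

# Expected output:
#   VCPU: schedulable=192, largest single allocation=16
#   MEMORY_MB: schedulable=196078, largest single allocation=65530
#   DISK_GB: schedulable=400, largest single allocation=156
```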